From 7d5915e0b59dd761b1734a0affdfc9c153de2fe1 Mon Sep 17 00:00:00 2001 From: hatim dinia Date: Tue, 2 Apr 2024 18:19:28 +0200 Subject: [PATCH 001/147] fix(ui-bc): handle empty constraints data --- .../explore/Modelization/BindingConstraints/index.tsx | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/index.tsx index ac25933a74..b8fd1d1d32 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/index.tsx @@ -35,10 +35,14 @@ function BindingConstraints() { ); useEffect(() => { - if (constraints.data && !currentConstraintId) { - const firstConstraintId = constraints.data[0].id; - dispatch(setCurrentBindingConst(firstConstraintId)); + const { data } = constraints; + + if (!data || data.length === 0 || currentConstraintId) { + return; } + + const firstConstraintId = data[0].id; + dispatch(setCurrentBindingConst(firstConstraintId)); }, [constraints, currentConstraintId, dispatch]); //////////////////////////////////////////////////////////////// From 1f5f8a7904516aae01c644df054dd482f7e9d8ce Mon Sep 17 00:00:00 2001 From: hatim dinia Date: Wed, 3 Apr 2024 14:32:33 +0200 Subject: [PATCH 002/147] feat(ui-bc): handle version-specific default value for `group` field --- .../BindingConstraints/AddDialog.tsx | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx index 35fcb9c90e..ea78b72214 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx @@ -30,15 +30,6 @@ interface Props { reloadConstraintsList: VoidFunction; } -const defaultValues = { - name: "", - group: "default", - enabled: true, - timeStep: TimeStep.HOURLY, - operator: BindingConstraintOperator.LESS, - comments: "", -}; - // TODO rename AddConstraintDialog function AddDialog({ open, @@ -50,6 +41,16 @@ function AddDialog({ const { enqueueSnackbar } = useSnackbar(); const dispatch = useAppDispatch(); const [t] = useTranslation(); + const studyVersion = Number(study.version); + + const defaultValues = { + name: "", + group: studyVersion >= 870 ? 
"default" : "", + enabled: true, + timeStep: TimeStep.HOURLY, + operator: BindingConstraintOperator.LESS, + comments: "", + }; const operatorOptions = useMemo( () => @@ -145,7 +146,7 @@ function AddDialog({ validateString(v, { existingValues: existingConstraints }), }} /> - {Number(study.version) >= 870 && ( + {studyVersion >= 870 && ( Date: Wed, 3 Apr 2024 17:07:05 +0200 Subject: [PATCH 003/147] fix(api-bc): prevent null values for optional strings --- antarest/study/business/binding_constraint_management.py | 1 + .../model/command/create_binding_constraint.py | 8 ++++---- .../model/command/test_manage_binding_constraints.py | 5 ++++- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py index a382277bd5..ac012ed0e0 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -423,6 +423,7 @@ def constraint_model_adapter(constraint: Mapping[str, Any], version: int) -> Con "terms": constraint.get("terms", []), } + # TODO: Implement a model for version-specific fields. Output filters are sent regardless of the version. if version >= 840: constraint_output["filter_year_by_year"] = constraint.get("filter_year_by_year") or constraint.get( "filter-year-by-year", "" diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py index 5a832b7d29..488596643e 100644 --- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py @@ -75,13 +75,13 @@ class BindingConstraintProperties(BaseModel, extra=Extra.forbid, allow_populatio enabled: bool = True time_step: BindingConstraintFrequency = BindingConstraintFrequency.HOURLY operator: BindingConstraintOperator = BindingConstraintOperator.EQUAL - comments: t.Optional[str] = None - filter_year_by_year: t.Optional[str] = None - filter_synthesis: t.Optional[str] = None + comments: t.Optional[str] = "" + filter_year_by_year: t.Optional[str] = "" + filter_synthesis: t.Optional[str] = "" class BindingConstraintProperties870(BindingConstraintProperties): - group: t.Optional[str] = None + group: t.Optional[str] = "" class BindingConstraintMatrices(BaseModel, extra=Extra.forbid, allow_population_by_field_name=True): diff --git a/tests/variantstudy/model/command/test_manage_binding_constraints.py b/tests/variantstudy/model/command/test_manage_binding_constraints.py index aab13307b1..fc124bdb40 100644 --- a/tests/variantstudy/model/command/test_manage_binding_constraints.py +++ b/tests/variantstudy/model/command/test_manage_binding_constraints.py @@ -92,6 +92,7 @@ def test_manage_binding_constraint(empty_study: FileStudy, command_context: Comm "name": "BD 2", "id": "bd 2", "enabled": False, + "comments": "", "area1.cluster": 50.0, "operator": "both", "type": "daily", @@ -127,6 +128,7 @@ def test_manage_binding_constraint(empty_study: FileStudy, command_context: Comm "id": "bd 1", "enabled": False, "area1%area2": "800.0%30", + "comments": "", "operator": "both", "type": "weekly", } @@ -151,6 +153,7 @@ def test_manage_binding_constraint(empty_study: FileStudy, command_context: Comm "id": "bd 2", "enabled": False, "area1.cluster": 50.0, + "comments": "", "operator": "both", "type": "daily", } @@ -338,7 +341,7 @@ def test_revert(command_context: 
CommandContext): operator=BindingConstraintOperator.EQUAL, coeffs={"a": [0.3]}, values=hourly_matrix_id, - comments=None, + comments="", command_context=command_context, ) ] From bc04f27827ffa82c472d3167fe9fbe91f96ca8aa Mon Sep 17 00:00:00 2001 From: hatim dinia Date: Thu, 4 Apr 2024 17:45:20 +0200 Subject: [PATCH 004/147] fix(ui-bc): allow broader special chars set --- .../explore/Modelization/BindingConstraints/AddDialog.tsx | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx index ea78b72214..f1af440491 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx @@ -143,7 +143,10 @@ function AddDialog({ control={control} rules={{ validate: (v) => - validateString(v, { existingValues: existingConstraints }), + validateString(v, { + existingValues: existingConstraints, + specialChars: "@&_-()", + }), }} /> {studyVersion >= 870 && ( From 9342997a0796f27b56370d70f9ef05534f270797 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 26 Mar 2024 10:04:07 +0100 Subject: [PATCH 005/147] chore: remove unused imports --- antarest/core/configdata/model.py | 3 +-- antarest/core/filesystem_blueprint.py | 1 - antarest/matrixstore/main.py | 1 - antarest/matrixstore/repository.py | 4 ++-- antarest/study/business/areas/properties_management.py | 2 +- antarest/study/storage/variantstudy/repository.py | 2 +- 6 files changed, 5 insertions(+), 8 deletions(-) diff --git a/antarest/core/configdata/model.py b/antarest/core/configdata/model.py index 8db2522b43..cb58784493 100644 --- a/antarest/core/configdata/model.py +++ b/antarest/core/configdata/model.py @@ -2,8 +2,7 @@ from typing import Any, Optional from pydantic import BaseModel -from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, Sequence, String # type: ignore -from sqlalchemy.orm import relationship # type: ignore +from sqlalchemy import Column, Integer, String # type: ignore from antarest.core.persistence import Base diff --git a/antarest/core/filesystem_blueprint.py b/antarest/core/filesystem_blueprint.py index 3b15ebd03a..bf247978b2 100644 --- a/antarest/core/filesystem_blueprint.py +++ b/antarest/core/filesystem_blueprint.py @@ -15,7 +15,6 @@ from starlette.responses import PlainTextResponse, StreamingResponse from antarest.core.config import Config -from antarest.core.jwt import JWTUser from antarest.core.utils.web import APITag from antarest.login.auth import Auth diff --git a/antarest/matrixstore/main.py b/antarest/matrixstore/main.py index ddd29c2d36..b59e3eb87a 100644 --- a/antarest/matrixstore/main.py +++ b/antarest/matrixstore/main.py @@ -1,7 +1,6 @@ from typing import Optional from fastapi import FastAPI -from fastapi_jwt_auth.exceptions import AuthJWTException # type: ignore from antarest.core.config import Config from antarest.core.filetransfer.service import FileTransferManager diff --git a/antarest/matrixstore/repository.py b/antarest/matrixstore/repository.py index 9ab44a69ec..8f3e78f082 100644 --- a/antarest/matrixstore/repository.py +++ b/antarest/matrixstore/repository.py @@ -6,8 +6,8 @@ import numpy as np from filelock import FileLock from numpy import typing as npt -from sqlalchemy import and_, exists # type: ignore -from sqlalchemy.orm import Session, aliased # 
type: ignore +from sqlalchemy import exists # type: ignore +from sqlalchemy.orm import Session # type: ignore from antarest.core.utils.fastapi_sqlalchemy import db from antarest.matrixstore.model import Matrix, MatrixContent, MatrixData, MatrixDataSet diff --git a/antarest/study/business/areas/properties_management.py b/antarest/study/business/areas/properties_management.py index 7b09a08cf4..96850d6d7b 100644 --- a/antarest/study/business/areas/properties_management.py +++ b/antarest/study/business/areas/properties_management.py @@ -2,7 +2,7 @@ from builtins import sorted from typing import Any, Dict, Iterable, List, Optional, Set, cast -from pydantic import Field, root_validator +from pydantic import root_validator from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import FieldInfo, FormFieldsBaseModel, execute_or_add_commands diff --git a/antarest/study/storage/variantstudy/repository.py b/antarest/study/storage/variantstudy/repository.py index bf2c979de1..b9f0d88dac 100644 --- a/antarest/study/storage/variantstudy/repository.py +++ b/antarest/study/storage/variantstudy/repository.py @@ -1,6 +1,6 @@ import typing as t -from sqlalchemy.orm import Session, joinedload, subqueryload # type: ignore +from sqlalchemy.orm import Session, joinedload # type: ignore from antarest.core.interfaces.cache import ICache from antarest.core.utils.fastapi_sqlalchemy import db From e6cf00757bff760cfed524e80b5817a398fe1c1d Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 26 Mar 2024 10:43:35 +0100 Subject: [PATCH 006/147] style: reindent source code --- antarest/launcher/web.py | 4 ++-- antarest/main.py | 2 +- antarest/study/business/table_mode_management.py | 8 ++------ antarest/study/repository.py | 4 +--- antarest/utils.py | 4 +--- 5 files changed, 7 insertions(+), 15 deletions(-) diff --git a/antarest/launcher/web.py b/antarest/launcher/web.py index 051eba2cc4..14eb39aee2 100644 --- a/antarest/launcher/web.py +++ b/antarest/launcher/web.py @@ -231,7 +231,7 @@ def get_solver_versions( "value": "local", }, }, - ) + ), ) -> List[str]: """ Get list of supported solver versions defined in the configuration. @@ -268,7 +268,7 @@ def get_nb_cores( "value": "local", }, }, - ) + ), ) -> Dict[str, int]: """ Retrieve the numer of cores of the launcher. 
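The `),` additions in the hunks above are the so-called magic trailing comma: once the last argument of a call or parameter list ends with a comma, formatters such as black keep the construct expanded, one argument per line, instead of collapsing it. A minimal, hypothetical Python illustration (the function and values below are placeholders, not taken from this patch):

    def compute(alpha: int, beta: int) -> int:
        return alpha + beta

    # Without a trailing comma, black may join the call onto a single line:
    total = compute(1, 2)

    # With a trailing comma after the last argument, the exploded layout is kept:
    total = compute(
        1,
        2,
    )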
diff --git a/antarest/main.py b/antarest/main.py index 2187088d2b..3973c79ace 100644 --- a/antarest/main.py +++ b/antarest/main.py @@ -61,7 +61,7 @@ class PathType: from antarest.main import PathType parser = argparse.ArgumentParser() - parser.add_argument('--input', type=PathType(file_ok=True, exists=True)) + parser.add_argument("--input", type=PathType(file_ok=True, exists=True)) args = parser.parse_args() print(args.input) diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index 8a83c21047..23d8674781 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -436,17 +436,13 @@ def get_column_value(col: str, data: Dict[str, Any]) -> Any: if table_type == TableTemplateType.AREA: return { - area_id: columns_model.construct( - **{col: get_column_value(col, data) for col in columns} - ) # type: ignore + area_id: columns_model.construct(**{col: get_column_value(col, data) for col in columns}) # type: ignore for area_id, data in glob_object.items() } if table_type == TableTemplateType.BINDING_CONSTRAINT: return { - data["id"]: columns_model.construct( - **{col: get_column_value(col, data) for col in columns} - ) # type: ignore + data["id"]: columns_model.construct(**{col: get_column_value(col, data) for col in columns}) # type: ignore for data in glob_object.values() } diff --git a/antarest/study/repository.py b/antarest/study/repository.py index 93237ff850..b43341df77 100644 --- a/antarest/study/repository.py +++ b/antarest/study/repository.py @@ -23,9 +23,7 @@ def escape_like(string: str, escape_char: str = "\\") -> str: from sqlalchemy_utils import escape_like - query = session.query(User).filter( - User.name.ilike(escape_like('John')) - ) + query = session.query(User).filter(User.name.ilike(escape_like("John"))) Args: string: a string to escape diff --git a/antarest/utils.py b/antarest/utils.py index 39ea094168..1f61717ada 100644 --- a/antarest/utils.py +++ b/antarest/utils.py @@ -101,9 +101,7 @@ def init_db_engine( return engine -def create_event_bus( - application: Optional[FastAPI], config: Config -) -> Tuple[IEventBus, Optional[redis.Redis]]: # type: ignore +def create_event_bus(application: Optional[FastAPI], config: Config) -> Tuple[IEventBus, Optional[redis.Redis]]: # type: ignore redis_client = new_redis_instance(config.redis) if config.redis is not None else None return ( build_eventbus(application, config, True, redis_client), From da3cd716334d4f4c6aea40e0a06bbcd375a23eb6 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 26 Mar 2024 10:46:21 +0100 Subject: [PATCH 007/147] docs: improve docstring in `GenerationResultInfoDTO` --- antarest/study/storage/variantstudy/model/model.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/antarest/study/storage/variantstudy/model/model.py b/antarest/study/storage/variantstudy/model/model.py index cd478742b4..33befadb4a 100644 --- a/antarest/study/storage/variantstudy/model/model.py +++ b/antarest/study/storage/variantstudy/model/model.py @@ -12,7 +12,8 @@ class GenerationResultInfoDTO(BaseModel): Attributes: success: A boolean indicating whether the generation process was successful. - details: A list of tuples containing detailed information about the generation process. + details: A list of tuples containing detailed information about the generation process: + (``name``, ``output_status``, ``output_message``). 
""" success: bool From abcfbef578c108d34ce3da6d2b9d962250a054d3 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 26 Mar 2024 13:59:06 +0100 Subject: [PATCH 008/147] feat(commands): add the `command_id` in `ICommand` --- .../business/utils_binding_constraint.py | 16 ++++--------- .../storage/variantstudy/command_factory.py | 22 +++++++++++------ .../command/create_binding_constraint.py | 3 ++- .../variantstudy/model/command/icommand.py | 24 ++++++++++++++----- .../command/remove_binding_constraint.py | 2 +- .../command/update_binding_constraint.py | 3 ++- .../model/command/update_config.py | 2 +- 7 files changed, 44 insertions(+), 28 deletions(-) diff --git a/antarest/study/storage/variantstudy/business/utils_binding_constraint.py b/antarest/study/storage/variantstudy/business/utils_binding_constraint.py index 0779f7e048..37f08b0323 100644 --- a/antarest/study/storage/variantstudy/business/utils_binding_constraint.py +++ b/antarest/study/storage/variantstudy/business/utils_binding_constraint.py @@ -8,7 +8,7 @@ ) from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator, CommandOutput +from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator def apply_binding_constraint( @@ -29,7 +29,7 @@ def apply_binding_constraint( filter_year_by_year: t.Optional[str] = None, filter_synthesis: t.Optional[str] = None, group: t.Optional[str] = None, -) -> CommandOutput: +) -> str: version = study_data.config.version binding_constraints[new_key] = { "name": name, @@ -52,19 +52,13 @@ def apply_binding_constraint( if "%" in link_or_cluster: area_1, area_2 = link_or_cluster.split("%") if area_1 not in study_data.config.areas or area_2 not in study_data.config.areas[area_1].links: - return CommandOutput( - status=False, - message=f"Link '{link_or_cluster}' does not exist in binding constraint '{bd_id}'", - ) + return f"Link '{link_or_cluster}' does not exist in binding constraint '{bd_id}'" elif "." in link_or_cluster: # Cluster IDs are stored in lower case in the binding constraints file. 
area, cluster_id = link_or_cluster.split(".") thermal_ids = {thermal.id.lower() for thermal in study_data.config.areas[area].thermals} if area not in study_data.config.areas or cluster_id.lower() not in thermal_ids: - return CommandOutput( - status=False, - message=f"Cluster '{link_or_cluster}' does not exist in binding constraint '{bd_id}'", - ) + return f"Cluster '{link_or_cluster}' does not exist in binding constraint '{bd_id}'" else: raise NotImplementedError(f"Invalid link or thermal ID: {link_or_cluster}") @@ -95,7 +89,7 @@ def apply_binding_constraint( raise TypeError(repr(matrix_term)) if version >= 870: study_data.tree.save(matrix_term, ["input", "bindingconstraints", f"{bd_id}_{matrix_alias}"]) - return CommandOutput(status=True) + return "" # success def parse_bindings_coeffs_and_save_into_config( diff --git a/antarest/study/storage/variantstudy/command_factory.py b/antarest/study/storage/variantstudy/command_factory.py index 5cf298b15e..33aa9b13c2 100644 --- a/antarest/study/storage/variantstudy/command_factory.py +++ b/antarest/study/storage/variantstudy/command_factory.py @@ -1,4 +1,4 @@ -from typing import List +import typing as t from antarest.core.model import JSON from antarest.matrixstore.service import ISimpleMatrixService @@ -74,14 +74,19 @@ def __init__( patch_service=patch_service, ) - def _to_single_command(self, action: str, args: JSON, version: int) -> ICommand: + def _to_single_command(self, command_id: t.Optional[str], action: str, args: JSON, version: int) -> ICommand: """Convert a single CommandDTO to ICommand.""" if action in COMMAND_MAPPING: command_class = COMMAND_MAPPING[action] - return command_class(**args, command_context=self.command_context, version=version) # type: ignore + return command_class( # type: ignore + **args, + command_context=self.command_context, + version=version, + command_id=command_id, + ) raise NotImplementedError(action) - def to_command(self, command_dto: CommandDTO) -> List[ICommand]: + def to_command(self, command_dto: CommandDTO) -> t.List[ICommand]: """ Convert a CommandDTO to a list of ICommand. @@ -96,12 +101,15 @@ def to_command(self, command_dto: CommandDTO) -> List[ICommand]: """ args = command_dto.args if isinstance(args, dict): - return [self._to_single_command(command_dto.action, args, command_dto.version)] + return [self._to_single_command(command_dto.id, command_dto.action, args, command_dto.version)] elif isinstance(args, list): - return [self._to_single_command(command_dto.action, argument, command_dto.version) for argument in args] + return [ + self._to_single_command(command_dto.id, command_dto.action, argument, command_dto.version) + for argument in args + ] raise NotImplementedError() - def to_commands(self, cmd_dto_list: List[CommandDTO]) -> List[ICommand]: + def to_commands(self, cmd_dto_list: t.List[CommandDTO]) -> t.List[ICommand]: """ Convert a list of CommandDTO to a list of ICommand. 
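With the factory change above, the identifier of a stored command now travels from the CommandDTO into every ICommand it produces. A hedged sketch of the intended flow, assuming a CommandFactory instance (command_factory) built elsewhere and a CommandDTO carrying the id/action/args/version fields used by `to_command`; the concrete action and arguments below are illustrative placeholders, not taken from this patch:

    # The DTO as it would come out of the database (id is the stored UUID).
    dto = CommandDTO(
        id="8d3d6b3e-3c5a-4c3b-9a3e-2f1d4c5e6f70",  # hypothetical value
        action="create_area",
        args={"area_name": "North"},
        version=1,
    )

    # to_command() builds the concrete ICommand objects and, after this patch,
    # forwards the DTO id into their new `command_id` field.
    commands = command_factory.to_command(dto)
    assert all(str(cmd.command_id) == dto.id for cmd in commands)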
diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py index 488596643e..7e1b4d3eaa 100644 --- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py @@ -241,7 +241,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: bd_id = transform_name_to_id(self.name) self.validates_and_fills_matrices(specific_matrices=None, version=study_data.config.version, create=True) - return apply_binding_constraint( + err_msg = apply_binding_constraint( study_data, binding_constraints, str(new_key), @@ -260,6 +260,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: self.filter_synthesis, self.group, ) + return CommandOutput(status=not err_msg, message=err_msg) def to_dto(self) -> CommandDTO: dto = super().to_dto() diff --git a/antarest/study/storage/variantstudy/model/command/icommand.py b/antarest/study/storage/variantstudy/model/command/icommand.py index 6a17c34c10..3fd31f58fe 100644 --- a/antarest/study/storage/variantstudy/model/command/icommand.py +++ b/antarest/study/storage/variantstudy/model/command/icommand.py @@ -1,6 +1,7 @@ import logging +import typing as t +import uuid from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Any, Dict, List, Tuple from pydantic import BaseModel, Extra @@ -11,7 +12,7 @@ from antarest.study.storage.variantstudy.model.command_context import CommandContext from antarest.study.storage.variantstudy.model.model import CommandDTO -if TYPE_CHECKING: # False at runtime, for mypy +if t.TYPE_CHECKING: # False at runtime, for mypy from antarest.study.storage.variantstudy.business.command_extractor import CommandExtractor MATCH_SIGNATURE_SEPARATOR = "%" @@ -19,12 +20,23 @@ class ICommand(ABC, BaseModel, extra=Extra.forbid, arbitrary_types_allowed=True): + """ + Interface for all commands that can be applied to a study. + + Attributes: + command_id: The ID of the command extracted from the database, if any. + command_name: The name of the command. + version: The version of the command (currently always equal to 1). + command_context: The context of the command. + """ + + command_id: t.Optional[uuid.UUID] = None command_name: CommandName version: int command_context: CommandContext @abstractmethod - def _apply_config(self, study_data: FileStudyTreeConfig) -> Tuple[CommandOutput, Dict[str, Any]]: + def _apply_config(self, study_data: FileStudyTreeConfig) -> t.Tuple[CommandOutput, t.Dict[str, t.Any]]: """ Applies configuration changes to the study data. @@ -112,7 +124,7 @@ def match(self, other: "ICommand", equal: bool = False) -> bool: raise NotImplementedError() @abstractmethod - def _create_diff(self, other: "ICommand") -> List["ICommand"]: + def _create_diff(self, other: "ICommand") -> t.List["ICommand"]: """ Creates a list of commands representing the differences between the current instance and another `ICommand` object. @@ -126,7 +138,7 @@ def _create_diff(self, other: "ICommand") -> List["ICommand"]: """ raise NotImplementedError() - def create_diff(self, other: "ICommand") -> List["ICommand"]: + def create_diff(self, other: "ICommand") -> t.List["ICommand"]: """ Creates a list of commands representing the differences between the current instance and another `ICommand` object. 
@@ -142,7 +154,7 @@ def create_diff(self, other: "ICommand") -> List["ICommand"]: return self._create_diff(other) @abstractmethod - def get_inner_matrices(self) -> List[str]: + def get_inner_matrices(self) -> t.List[str]: """ Retrieves the list of matrix IDs. """ diff --git a/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py index 2bd52825c6..25b180c49d 100644 --- a/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py @@ -26,7 +26,7 @@ def _apply_config(self, study_data: FileStudyTreeConfig) -> Tuple[CommandOutput, dict(), ) study_data.bindings.remove(next(iter([bind for bind in study_data.bindings if bind.id == self.id]))) - return CommandOutput(status=True), dict() + return CommandOutput(status=True), {} def _apply(self, study_data: FileStudy) -> CommandOutput: if self.id not in [bind.id for bind in study_data.config.bindings]: diff --git a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py index 70ad16702f..211ea2f848 100644 --- a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py @@ -56,7 +56,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: self.validates_and_fills_matrices(specific_matrices=updated_matrices or None, version=study_data.config.version, create=False) # fmt: on - return apply_binding_constraint( + err_msg = apply_binding_constraint( study_data, binding_constraints, new_key, @@ -75,6 +75,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: self.filter_synthesis, self.group, ) + return CommandOutput(status=not err_msg, message=err_msg) def to_dto(self) -> CommandDTO: dto = super().to_dto() diff --git a/antarest/study/storage/variantstudy/model/command/update_config.py b/antarest/study/storage/variantstudy/model/command/update_config.py index 91caa6a738..29887d42e1 100644 --- a/antarest/study/storage/variantstudy/model/command/update_config.py +++ b/antarest/study/storage/variantstudy/model/command/update_config.py @@ -27,7 +27,7 @@ class UpdateConfig(ICommand): data: Union[str, int, float, bool, JSON, None] def _apply_config(self, study_data: FileStudyTreeConfig) -> Tuple[CommandOutput, Dict[str, Any]]: - return CommandOutput(status=True, message="ok"), dict() + return CommandOutput(status=True, message="ok"), {} def _apply(self, study_data: FileStudy) -> CommandOutput: url = self.target.split("/") From b9cc6262204cc0f11d045188774deef1607b9d02 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Wed, 27 Mar 2024 15:35:48 +0100 Subject: [PATCH 009/147] feat(commands): add new details DTO: dictionary with keys 'id', 'name', 'status' and 'msg' --- .../study/storage/variantstudy/model/model.py | 33 +++++++++++++++++-- 1 file changed, 30 insertions(+), 3 deletions(-) diff --git a/antarest/study/storage/variantstudy/model/model.py b/antarest/study/storage/variantstudy/model/model.py index 33befadb4a..e602c0d6e1 100644 --- a/antarest/study/storage/variantstudy/model/model.py +++ b/antarest/study/storage/variantstudy/model/model.py @@ -1,10 +1,38 @@ import typing as t +import uuid + +import typing_extensions as te from pydantic import BaseModel from antarest.core.model import JSON from antarest.study.model import 
StudyMetadataDTO +LegacyDetailsDTO = t.Tuple[str, bool, str] +""" +Legacy details DTO: triplet of name, output status and output message. +""" + + +class NewDetailsDTO(te.TypedDict): + """ + New details DTO: dictionary with keys 'id', 'name', 'status' and 'msg'. + + Attributes: + id: identifiant de la commande (UUID), + name: nom de la commande, + status: statut de la commande (true ou false), + msg: message de la génération de la commande ou message d'erreur (si le statut est false). + """ + + id: uuid.UUID + name: str + status: bool + msg: str + + +DetailsDTO = t.Union[LegacyDetailsDTO, NewDetailsDTO] + class GenerationResultInfoDTO(BaseModel): """ @@ -12,12 +40,11 @@ class GenerationResultInfoDTO(BaseModel): Attributes: success: A boolean indicating whether the generation process was successful. - details: A list of tuples containing detailed information about the generation process: - (``name``, ``output_status``, ``output_message``). + details: Objects containing detailed information about the generation process. """ success: bool - details: t.MutableSequence[t.Tuple[str, bool, str]] + details: t.MutableSequence[DetailsDTO] class CommandDTO(BaseModel): From 90bf3329a2350ca1e3e54ffa213eae120ff0fb69 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Wed, 27 Mar 2024 18:31:38 +0100 Subject: [PATCH 010/147] feat(commands): change `VariantCommandGenerator` to use the new details model --- .../study/storage/variantstudy/model/model.py | 3 +- .../variantstudy/snapshot_generator.py | 11 +- .../variantstudy/variant_command_generator.py | 87 ++++----- .../variantstudy/test_snapshot_generator.py | 168 ++++++++++++++---- .../variantstudy/model/test_variant_model.py | 45 ++++- 5 files changed, 227 insertions(+), 87 deletions(-) diff --git a/antarest/study/storage/variantstudy/model/model.py b/antarest/study/storage/variantstudy/model/model.py index e602c0d6e1..96ba21557d 100644 --- a/antarest/study/storage/variantstudy/model/model.py +++ b/antarest/study/storage/variantstudy/model/model.py @@ -2,7 +2,6 @@ import uuid import typing_extensions as te - from pydantic import BaseModel from antarest.core.model import JSON @@ -25,7 +24,7 @@ class NewDetailsDTO(te.TypedDict): msg: message de la génération de la commande ou message d'erreur (si le statut est false). 
""" - id: uuid.UUID + id: t.Optional[uuid.UUID] name: str status: bool msg: str diff --git a/antarest/study/storage/variantstudy/snapshot_generator.py b/antarest/study/storage/variantstudy/snapshot_generator.py index 138089a35e..ee4532349f 100644 --- a/antarest/study/storage/variantstudy/snapshot_generator.py +++ b/antarest/study/storage/variantstudy/snapshot_generator.py @@ -175,8 +175,15 @@ def _apply_commands( if not results.success: message = f"Failed to generate variant study {variant_study.id}" if results.details: - detail: t.Tuple[str, bool, str] = results.details[-1] - message += f": {detail[2]}" + detail = results.details[-1] + if isinstance(detail, (tuple, list)): + # old format: LegacyDetailsDTO + message += f": {detail[2]}" + elif isinstance(detail, dict): + # new format since v2.17: NewDetailsDTO + message += f": {detail['msg']}" + else: # pragma: no cover + raise NotImplementedError(f"Unexpected detail type: {type(detail)}") raise VariantGenerationError(message) return results diff --git a/antarest/study/storage/variantstudy/variant_command_generator.py b/antarest/study/storage/variantstudy/variant_command_generator.py index ebe934ce15..e2edc39094 100644 --- a/antarest/study/storage/variantstudy/variant_command_generator.py +++ b/antarest/study/storage/variantstudy/variant_command_generator.py @@ -10,13 +10,23 @@ from antarest.study.storage.variantstudy.model.command.common import CommandOutput from antarest.study.storage.variantstudy.model.command.icommand import ICommand from antarest.study.storage.variantstudy.model.dbmodel import VariantStudy -from antarest.study.storage.variantstudy.model.model import GenerationResultInfoDTO +from antarest.study.storage.variantstudy.model.model import GenerationResultInfoDTO, NewDetailsDTO logger = logging.getLogger(__name__) APPLY_CALLBACK = Callable[[ICommand, Union[FileStudyTreeConfig, FileStudy]], CommandOutput] +class CmdNotifier: + def __init__(self, study_id: str, total_count: int) -> None: + self.index = 0 + self.study_id = study_id + self.total_count = total_count + + def __call__(self, x: float) -> None: + logger.info(f"Command {self.index}/{self.total_count} [{self.study_id}] applied in {x}s") + + class VariantCommandGenerator: def __init__(self, study_factory: StudyFactory) -> None: self.study_factory = study_factory @@ -33,53 +43,44 @@ def _generate( # Apply commands results: GenerationResultInfoDTO = GenerationResultInfoDTO(success=True, details=[]) - stopwatch.reset_current() logger.info("Applying commands") - command_index = 0 - total_commands = len(commands) - study_id = metadata.id if metadata is not None else "-" - for command_batch in commands: - command_output_status = True - command_output_message = "" - command_name = command_batch[0].command_name.value if len(command_batch) > 0 else "" + study_id = "-" if metadata is None else metadata.id + + # flatten the list of commands + all_commands = [command for command_batch in commands for command in command_batch] + + # Prepare the stopwatch + cmd_notifier = CmdNotifier(study_id, len(all_commands)) + stopwatch.reset_current() + + # Store all the outputs + for index, cmd in enumerate(all_commands, 1): try: - command_index += 1 - command_output_messages: List[str] = [] - for command in command_batch: - output = applier(command, data) - command_output_messages.append(output.message) - command_output_status = command_output_status and output.status - if not command_output_status: - break - command_output_message = "\n".join(command_output_messages) + output = applier(cmd, 
data) except Exception as e: - command_output_status = False - command_output_message = f"Error while applying command {command_name}" - logger.error(command_output_message, exc_info=e) - break - finally: - results.details.append( - ( - command_name, - command_output_status, - command_output_message, - ) - ) - results.success = command_output_status - if notifier: - notifier( - command_index - 1, - command_output_status, - command_output_message, - ) - stopwatch.log_elapsed( - lambda x: logger.info( - f"Command {command_index}/{total_commands} [{study_id}] {command.match_signature()} applied in {x}s" - ) + # Unhandled exception + output = CommandOutput( + status=False, + message=f"Error while applying command {cmd.command_name}", ) + logger.error(output.message, exc_info=e) + + detail: NewDetailsDTO = { + "id": cmd.command_id, + "name": cmd.command_name.value, + "status": output.status, + "msg": output.message, + } + results.details.append(detail) + + if notifier: + notifier(index - 1, output.status, output.message) + + cmd_notifier.index = index + stopwatch.log_elapsed(cmd_notifier) + + results.success = all(detail["status"] for detail in results.details) # type: ignore - if not results.success: - break data_type = isinstance(data, FileStudy) stopwatch.log_elapsed( lambda x: logger.info( diff --git a/tests/study/storage/variantstudy/test_snapshot_generator.py b/tests/study/storage/variantstudy/test_snapshot_generator.py index 2365049432..567d0faae6 100644 --- a/tests/study/storage/variantstudy/test_snapshot_generator.py +++ b/tests/study/storage/variantstudy/test_snapshot_generator.py @@ -21,13 +21,29 @@ from antarest.study.model import RawStudy, Study, StudyAdditionalData from antarest.study.storage.rawstudy.raw_study_service import RawStudyService from antarest.study.storage.variantstudy.model.dbmodel import CommandBlock, VariantStudy, VariantStudySnapshot -from antarest.study.storage.variantstudy.model.model import CommandDTO, GenerationResultInfoDTO +from antarest.study.storage.variantstudy.model.model import CommandDTO from antarest.study.storage.variantstudy.snapshot_generator import SnapshotGenerator, search_ref_study from antarest.study.storage.variantstudy.variant_study_service import VariantStudyService from tests.db_statement_recorder import DBStatementRecorder from tests.helpers import with_db_context +class AnyUUID: + """Mock object to match any UUID.""" + + def __init__(self, as_string: bool = False): + self.as_string = as_string + + def __eq__(self, other): + if self.as_string: + try: + uuid.UUID(other) + return True + except ValueError: + return False + return isinstance(other, uuid.UUID) + + def _create_variant( tmp_path: Path, variant_name: str, @@ -851,15 +867,35 @@ def test_generate__nominal_case( assert len(db_recorder.sql_statements) == 5, str(db_recorder) # Check: the variant generation must succeed. 
- assert results == GenerationResultInfoDTO( - success=True, - details=[ - ("create_area", True, "Area 'North' created"), - ("create_area", True, "Area 'South' created"), - ("create_link", True, "Link between 'north' and 'south' created"), - ("create_cluster", True, "Thermal cluster 'gas_cluster' added to area 'south'."), + assert results.dict() == { + "success": True, + "details": [ + { + "id": AnyUUID(), + "name": "create_area", + "status": True, + "msg": "Area 'North' created", + }, + { + "id": AnyUUID(), + "name": "create_area", + "status": True, + "msg": "Area 'South' created", + }, + { + "id": AnyUUID(), + "name": "create_link", + "status": True, + "msg": "Link between 'north' and 'south' created", + }, + { + "id": AnyUUID(), + "name": "create_cluster", + "status": True, + "msg": "Thermal cluster 'gas_cluster' added to area 'south'.", + }, ], - ) + } # Check: the variant is correctly generated and all commands are applied. snapshot_dir = variant_study.snapshot_dir @@ -907,13 +943,33 @@ def test_generate__nominal_case( assert list(snapshot_dir.parent.iterdir()) == [snapshot_dir] # Check: the notifications are correctly registered. - assert notifier.notifications == [ # type: ignore + assert notifier.notifications == [ { "details": [ - ["create_area", True, "Area 'North' created"], - ["create_area", True, "Area 'South' created"], - ["create_link", True, "Link between 'north' and 'south' created"], - ["create_cluster", True, "Thermal cluster 'gas_cluster' added to area 'south'."], + { + "id": AnyUUID(as_string=True), + "msg": "Area 'North' created", + "name": "create_area", + "status": True, + }, + { + "id": AnyUUID(as_string=True), + "msg": "Area 'South' created", + "name": "create_area", + "status": True, + }, + { + "id": AnyUUID(as_string=True), + "msg": "Link between 'north' and 'south' created", + "name": "create_link", + "status": True, + }, + { + "id": AnyUUID(as_string=True), + "msg": "Thermal cluster 'gas_cluster' added to area 'south'.", + "name": "create_cluster", + "status": True, + }, ], "success": True, } @@ -996,15 +1052,35 @@ def test_generate__with_denormalize_true( ) # Check the results - assert results == GenerationResultInfoDTO( - success=True, - details=[ - ("create_area", True, "Area 'North' created"), - ("create_area", True, "Area 'South' created"), - ("create_link", True, "Link between 'north' and 'south' created"), - ("create_cluster", True, "Thermal cluster 'gas_cluster' added to area 'south'."), + assert results.dict() == { + "success": True, + "details": [ + { + "id": AnyUUID(), + "name": "create_area", + "status": True, + "msg": "Area 'North' created", + }, + { + "id": AnyUUID(), + "name": "create_area", + "status": True, + "msg": "Area 'South' created", + }, + { + "id": AnyUUID(), + "name": "create_link", + "status": True, + "msg": "Link between 'north' and 'south' created", + }, + { + "id": AnyUUID(), + "name": "create_cluster", + "status": True, + "msg": "Thermal cluster 'gas_cluster' added to area 'south'.", + }, ], - ) + } # Check: the matrices are denormalized (we should have TSV files). 
snapshot_dir = variant_study.snapshot_dir @@ -1099,15 +1175,35 @@ def test_generate__notification_failure( ) # Check the results - assert results == GenerationResultInfoDTO( - success=True, - details=[ - ("create_area", True, "Area 'North' created"), - ("create_area", True, "Area 'South' created"), - ("create_link", True, "Link between 'north' and 'south' created"), - ("create_cluster", True, "Thermal cluster 'gas_cluster' added to area 'south'."), + assert results.dict() == { + "success": True, + "details": [ + { + "id": AnyUUID(), + "name": "create_area", + "status": True, + "msg": "Area 'North' created", + }, + { + "id": AnyUUID(), + "name": "create_area", + "status": True, + "msg": "Area 'South' created", + }, + { + "id": AnyUUID(), + "name": "create_link", + "status": True, + "msg": "Link between 'north' and 'south' created", + }, + { + "id": AnyUUID(), + "name": "create_cluster", + "status": True, + "msg": "Thermal cluster 'gas_cluster' added to area 'south'.", + }, ], - ) + } # Check th logs assert "Something went wrong" in caplog.text @@ -1161,4 +1257,14 @@ def test_generate__variant_of_variant( ) # Check the results - assert results == GenerationResultInfoDTO(success=True, details=[("create_area", True, "Area 'East' created")]) + assert results.dict() == { + "success": True, + "details": [ + { + "id": AnyUUID(), + "name": "create_area", + "status": True, + "msg": "Area 'East' created", + }, + ], + } diff --git a/tests/variantstudy/model/test_variant_model.py b/tests/variantstudy/model/test_variant_model.py index 63ac7293b8..9328364199 100644 --- a/tests/variantstudy/model/test_variant_model.py +++ b/tests/variantstudy/model/test_variant_model.py @@ -12,12 +12,19 @@ from antarest.study.model import RawStudy, StudyAdditionalData from antarest.study.storage.rawstudy.raw_study_service import RawStudyService from antarest.study.storage.variantstudy.business.matrix_constants_generator import GeneratorMatrixConstants -from antarest.study.storage.variantstudy.model.model import CommandDTO, GenerationResultInfoDTO +from antarest.study.storage.variantstudy.model.model import CommandDTO from antarest.study.storage.variantstudy.snapshot_generator import SnapshotGenerator from antarest.study.storage.variantstudy.variant_study_service import VariantStudyService from tests.helpers import with_db_context +class AnyUUID: + """Mock object to match any UUID.""" + + def __eq__(self, other): + return isinstance(other, uuid.UUID) + + class TestVariantStudyService: @pytest.fixture(name="jwt_user") def jwt_user_fixture(self) -> JWTUser: @@ -141,13 +148,33 @@ def test_commands_service( repository=variant_study_service.repository, ) results = generator.generate_snapshot(saved_id, jwt_user, denormalize=False) - assert results == GenerationResultInfoDTO( - success=True, - details=[ - ("create_area", True, "Area 'Yes' created"), - ("create_area", True, "Area 'No' created"), - ("create_link", True, "Link between 'no' and 'yes' created"), - ("create_cluster", True, "Thermal cluster 'cl1' added to area 'yes'."), + assert results.dict() == { + "success": True, + "details": [ + { + "id": AnyUUID(), + "name": "create_area", + "status": True, + "msg": "Area 'Yes' created", + }, + { + "id": AnyUUID(), + "name": "create_area", + "status": True, + "msg": "Area 'No' created", + }, + { + "id": AnyUUID(), + "name": "create_link", + "status": True, + "msg": "Link between 'no' and 'yes' created", + }, + { + "id": AnyUUID(), + "name": "create_cluster", + "status": True, + "msg": "Thermal cluster 'cl1' added to area 'yes'.", 
+ }, ], - ) + } assert study.snapshot.id == study.id From 72216c67171ad5cad87741bcbaba5730f4cff2e4 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Mon, 8 Apr 2024 19:28:54 +0200 Subject: [PATCH 011/147] test(commands): move `AnyUUID` mock in the `helpers` module --- tests/helpers.py | 24 +++++++++++++++++++ .../variantstudy/test_snapshot_generator.py | 17 +------------ .../variantstudy/model/test_variant_model.py | 9 +------ 3 files changed, 26 insertions(+), 24 deletions(-) diff --git a/tests/helpers.py b/tests/helpers.py index a9cfdb9ee8..0736eafd59 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -1,4 +1,5 @@ import time +import uuid from datetime import datetime, timedelta, timezone from functools import wraps from typing import Any, Callable, Dict, List, cast @@ -76,3 +77,26 @@ def auto_retry_assert(predicate: Callable[..., bool], timeout: int = 2, delay: f return time.sleep(delay) raise AssertionError() + + +class AnyUUID: + """Mock object to match any UUID.""" + + def __init__(self, as_string: bool = False): + self.as_string = as_string + + def __eq__(self, other: object) -> bool: + if isinstance(other, AnyUUID): + return True + if isinstance(other, str): + if self.as_string: + try: + uuid.UUID(other) + return True + except ValueError: + return False + return False + return isinstance(other, uuid.UUID) + + def __ne__(self, other: object) -> bool: + return not self.__eq__(other) diff --git a/tests/study/storage/variantstudy/test_snapshot_generator.py b/tests/study/storage/variantstudy/test_snapshot_generator.py index 567d0faae6..41aaa82e64 100644 --- a/tests/study/storage/variantstudy/test_snapshot_generator.py +++ b/tests/study/storage/variantstudy/test_snapshot_generator.py @@ -24,26 +24,11 @@ from antarest.study.storage.variantstudy.model.model import CommandDTO from antarest.study.storage.variantstudy.snapshot_generator import SnapshotGenerator, search_ref_study from antarest.study.storage.variantstudy.variant_study_service import VariantStudyService +from helpers import AnyUUID from tests.db_statement_recorder import DBStatementRecorder from tests.helpers import with_db_context -class AnyUUID: - """Mock object to match any UUID.""" - - def __init__(self, as_string: bool = False): - self.as_string = as_string - - def __eq__(self, other): - if self.as_string: - try: - uuid.UUID(other) - return True - except ValueError: - return False - return isinstance(other, uuid.UUID) - - def _create_variant( tmp_path: Path, variant_name: str, diff --git a/tests/variantstudy/model/test_variant_model.py b/tests/variantstudy/model/test_variant_model.py index 9328364199..98c73b949f 100644 --- a/tests/variantstudy/model/test_variant_model.py +++ b/tests/variantstudy/model/test_variant_model.py @@ -15,14 +15,7 @@ from antarest.study.storage.variantstudy.model.model import CommandDTO from antarest.study.storage.variantstudy.snapshot_generator import SnapshotGenerator from antarest.study.storage.variantstudy.variant_study_service import VariantStudyService -from tests.helpers import with_db_context - - -class AnyUUID: - """Mock object to match any UUID.""" - - def __eq__(self, other): - return isinstance(other, uuid.UUID) +from tests.helpers import AnyUUID, with_db_context class TestVariantStudyService: From eccf738df2c11bf5fc3c3f6a18cbda712441f884 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Mon, 8 Apr 2024 19:31:32 +0200 Subject: [PATCH 012/147] feat(commands): the `id` field within the `NewDetailsDTO` class is mandatory --- antarest/study/storage/variantstudy/model/model.py | 10 
+++++----- .../storage/variantstudy/variant_command_generator.py | 4 +++- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/antarest/study/storage/variantstudy/model/model.py b/antarest/study/storage/variantstudy/model/model.py index 96ba21557d..e170bf4383 100644 --- a/antarest/study/storage/variantstudy/model/model.py +++ b/antarest/study/storage/variantstudy/model/model.py @@ -18,13 +18,13 @@ class NewDetailsDTO(te.TypedDict): New details DTO: dictionary with keys 'id', 'name', 'status' and 'msg'. Attributes: - id: identifiant de la commande (UUID), - name: nom de la commande, - status: statut de la commande (true ou false), - msg: message de la génération de la commande ou message d'erreur (si le statut est false). + id: command identifier (UUID) if it exists. + name: command name. + status: command status (true or false). + msg: command generation message or error message (if the status is false). """ - id: t.Optional[uuid.UUID] + id: uuid.UUID name: str status: bool msg: str diff --git a/antarest/study/storage/variantstudy/variant_command_generator.py b/antarest/study/storage/variantstudy/variant_command_generator.py index e2edc39094..a08ec86b4c 100644 --- a/antarest/study/storage/variantstudy/variant_command_generator.py +++ b/antarest/study/storage/variantstudy/variant_command_generator.py @@ -1,5 +1,6 @@ import logging import shutil +import uuid from pathlib import Path from typing import Callable, List, Optional, Tuple, Union, cast @@ -65,8 +66,9 @@ def _generate( ) logger.error(output.message, exc_info=e) + # noinspection PyTypeChecker detail: NewDetailsDTO = { - "id": cmd.command_id, + "id": uuid.UUID(int=0) if cmd.command_id is None else cmd.command_id, "name": cmd.command_name.value, "status": output.status, "msg": output.message, From 4a9c0e2666a8f69085c52eec17a5de30dc7d3537 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Mon, 8 Apr 2024 19:48:27 +0200 Subject: [PATCH 013/147] refactor(commands): change the signature of `_to_single_command` --- antarest/study/storage/variantstudy/command_factory.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/antarest/study/storage/variantstudy/command_factory.py b/antarest/study/storage/variantstudy/command_factory.py index 33aa9b13c2..c4803ce0cc 100644 --- a/antarest/study/storage/variantstudy/command_factory.py +++ b/antarest/study/storage/variantstudy/command_factory.py @@ -74,7 +74,7 @@ def __init__( patch_service=patch_service, ) - def _to_single_command(self, command_id: t.Optional[str], action: str, args: JSON, version: int) -> ICommand: + def _to_single_command(self, action: str, args: JSON, version: int, command_id: t.Optional[str]) -> ICommand: """Convert a single CommandDTO to ICommand.""" if action in COMMAND_MAPPING: command_class = COMMAND_MAPPING[action] @@ -101,10 +101,10 @@ def to_command(self, command_dto: CommandDTO) -> t.List[ICommand]: """ args = command_dto.args if isinstance(args, dict): - return [self._to_single_command(command_dto.id, command_dto.action, args, command_dto.version)] + return [self._to_single_command(command_dto.action, args, command_dto.version, command_dto.id)] elif isinstance(args, list): return [ - self._to_single_command(command_dto.id, command_dto.action, argument, command_dto.version) + self._to_single_command(command_dto.action, argument, command_dto.version, command_dto.id) for argument in args ] raise NotImplementedError() From d8b55ef9e49835b44fceec55c2124f9cb7ed73fa Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Mon, 8 Apr 2024 19:58:16 
+0200 Subject: [PATCH 014/147] test(commands): correct import of `AnyUUID` in UT --- tests/study/storage/variantstudy/test_snapshot_generator.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/study/storage/variantstudy/test_snapshot_generator.py b/tests/study/storage/variantstudy/test_snapshot_generator.py index 41aaa82e64..e9de3da131 100644 --- a/tests/study/storage/variantstudy/test_snapshot_generator.py +++ b/tests/study/storage/variantstudy/test_snapshot_generator.py @@ -24,9 +24,8 @@ from antarest.study.storage.variantstudy.model.model import CommandDTO from antarest.study.storage.variantstudy.snapshot_generator import SnapshotGenerator, search_ref_study from antarest.study.storage.variantstudy.variant_study_service import VariantStudyService -from helpers import AnyUUID from tests.db_statement_recorder import DBStatementRecorder -from tests.helpers import with_db_context +from tests.helpers import AnyUUID, with_db_context def _create_variant( From 9ef57e49b994dc2b452459faca207e38d2554993 Mon Sep 17 00:00:00 2001 From: Mohamed Abdel Wedoud Date: Tue, 26 Mar 2024 17:36:56 +0100 Subject: [PATCH 015/147] fix(study-search): update pagination query regarding joins on groups and tags --- antarest/study/repository.py | 23 +- .../studies_blueprint/test_get_studies.py | 148 +++-- tests/study/test_repository.py | 623 +++++++++--------- 3 files changed, 408 insertions(+), 386 deletions(-) diff --git a/antarest/study/repository.py b/antarest/study/repository.py index 93237ff850..0674f396a7 100644 --- a/antarest/study/repository.py +++ b/antarest/study/repository.py @@ -257,9 +257,17 @@ def get_all( # pagination if pagination.page_nb or pagination.page_size: - q = q.offset(pagination.page_nb * pagination.page_size).limit(pagination.page_size) - - studies: t.Sequence[Study] = q.all() + limit = pagination.page_size + offset = pagination.page_nb * pagination.page_size + end = offset + limit + if sort_by is None: + q = q.order_by(entity.name.asc()) + if study_filter.groups or study_filter.tags: + studies: t.Sequence[Study] = q.all()[offset:end] + return studies + q = q.offset(offset).limit(limit) + + studies = q.all() return studies def count_studies(self, study_filter: StudyFilter = StudyFilter()) -> int: @@ -305,12 +313,9 @@ def _search_studies( else: q = q.filter(not_(RawStudy.missing.is_(None))) - if study_filter.users is not None: - q = q.options(joinedload(entity.owner)) - if study_filter.groups is not None: - q = q.options(joinedload(entity.groups)) - if study_filter.tags is not None: - q = q.options(joinedload(entity.tags)) + q = q.options(joinedload(entity.owner)) + q = q.options(joinedload(entity.groups)) + q = q.options(joinedload(entity.tags)) q = q.options(joinedload(entity.additional_data)) if study_filter.managed is not None: diff --git a/tests/integration/studies_blueprint/test_get_studies.py b/tests/integration/studies_blueprint/test_get_studies.py index 2cff53f047..af8f790f20 100644 --- a/tests/integration/studies_blueprint/test_get_studies.py +++ b/tests/integration/studies_blueprint/test_get_studies.py @@ -926,7 +926,7 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ res = client.post( STUDIES_URL, headers={"Authorization": f"Bearer {admin_access_token}"}, - params={"name": f"dummy_{study}"}, + params={"name": f"dummy_{study[6:]}"}, ) assert res.status_code in CREATE_STATUS_CODES, res.json() study_id = res.json() @@ -953,13 +953,13 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ assert 
res.status_code == 200, res.json() # studies that have owner but no groups for study, study_info in { - "study_10": {"owner": "user_1"}, - "study_11": {"owner": "user_2"}, + "study_X10": {"owner": "user_1"}, + "study_X11": {"owner": "user_2"}, }.items(): res = client.post( STUDIES_URL, headers={"Authorization": f"Bearer {admin_access_token}"}, - params={"name": f"dummy_{study}"}, + params={"name": f"dummy_{study[6:]}"}, ) assert res.status_code in CREATE_STATUS_CODES, res.json() study_id = res.json() @@ -986,7 +986,7 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ res = client.post( STUDIES_URL, headers={"Authorization": f"Bearer {admin_access_token}"}, - params={"name": f"dummy_{study}"}, + params={"name": f"dummy_{study[6:]}"}, ) assert res.status_code in CREATE_STATUS_CODES, res.json() study_id = res.json() @@ -1008,16 +1008,16 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ # create variant studies with neither owner nor groups for study, study_info in { - "study_12": {"public_mode": None}, - "study_13": {"public_mode": PublicMode.READ.value}, - "study_14": {"public_mode": PublicMode.EDIT.value}, - "study_15": {"public_mode": PublicMode.EXECUTE.value}, - "study_16": {"public_mode": PublicMode.FULL.value}, + "study_X12": {"public_mode": None}, + "study_X13": {"public_mode": PublicMode.READ.value}, + "study_X14": {"public_mode": PublicMode.EDIT.value}, + "study_X15": {"public_mode": PublicMode.EXECUTE.value}, + "study_X16": {"public_mode": PublicMode.FULL.value}, }.items(): res = client.post( STUDIES_URL, headers={"Authorization": f"Bearer {admin_access_token}"}, - params={"name": f"dummy_{study}"}, + params={"name": f"dummy_{study[6:]}"}, ) assert res.status_code in CREATE_STATUS_CODES, res.json() study_id = res.json() @@ -1040,12 +1040,12 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ # create raw studies for user_1 and user_2 that are part of some groups # studies that have owner and groups for study, study_info in { - "study_17": {"owner": "user_1", "groups": ["group_1"]}, - "study_18": {"owner": "user_1", "groups": ["group_2"]}, - "study_20": {"owner": "user_2", "groups": ["group_1"]}, - "study_21": {"owner": "user_2", "groups": ["group_2"]}, - "study_23": {"owner": "user_1", "groups": ["group_1", "group_2"]}, - "study_24": {"owner": "user_2", "groups": ["group_1", "group_2"]}, + "study_X17": {"owner": "user_1", "groups": ["group_1"]}, + "study_X18": {"owner": "user_1", "groups": ["group_2"]}, + "study_X20": {"owner": "user_2", "groups": ["group_1"]}, + "study_X21": {"owner": "user_2", "groups": ["group_2"]}, + "study_X23": {"owner": "user_1", "groups": ["group_1", "group_2"]}, + "study_X24": {"owner": "user_2", "groups": ["group_1", "group_2"]}, }.items(): res = client.post( STUDIES_URL, @@ -1070,8 +1070,8 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ assert res.status_code == 200, res.json() # studies that have owner but no groups for study, study_info in { - "study_26": {"owner": "user_1"}, - "study_27": {"owner": "user_2"}, + "study_X26": {"owner": "user_1"}, + "study_X27": {"owner": "user_2"}, }.items(): res = client.post( STUDIES_URL, @@ -1089,9 +1089,9 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ assert res.status_code == 200, res.json() # studies that have groups but no owner for study, study_info in { - "study_19": {"groups": ["group_1"]}, - "study_22": {"groups": ["group_2"]}, - "study_25": 
{"groups": ["group_1", "group_2"]}, + "study_X19": {"groups": ["group_1"]}, + "study_X22": {"groups": ["group_2"]}, + "study_X25": {"groups": ["group_1", "group_2"]}, }.items(): res = client.post( STUDIES_URL, @@ -1111,11 +1111,11 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ # create raw studies with neither owner nor groups for study, study_info in { - "study_28": {"public_mode": None}, - "study_29": {"public_mode": PublicMode.READ.value}, - "study_30": {"public_mode": PublicMode.EDIT.value}, - "study_31": {"public_mode": PublicMode.EXECUTE.value}, - "study_32": {"public_mode": PublicMode.FULL.value}, + "study_X28": {"public_mode": None}, + "study_X29": {"public_mode": PublicMode.READ.value}, + "study_X30": {"public_mode": PublicMode.EDIT.value}, + "study_X31": {"public_mode": PublicMode.EXECUTE.value}, + "study_X32": {"public_mode": PublicMode.FULL.value}, }.items(): res = client.post( STUDIES_URL, @@ -1136,13 +1136,13 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ # create studies for user_3 that is not part of any group # variant studies for study, study_info in { - "study_33": {"groups": ["group_1"]}, - "study_35": {"groups": []}, + "study_X33": {"groups": ["group_1"]}, + "study_X35": {"groups": []}, }.items(): res = client.post( STUDIES_URL, headers={"Authorization": f"Bearer {admin_access_token}"}, - params={"name": f"dummy_{study}"}, + params={"name": f"dummy_{study[6:]}"}, ) assert res.status_code in CREATE_STATUS_CODES, res.json() study_id = res.json() @@ -1169,8 +1169,8 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ assert res.status_code == 200, res.json() # raw studies for study, study_info in { - "study_34": {"groups": ["group_2"]}, - "study_36": {"groups": []}, + "study_X34": {"groups": ["group_2"]}, + "study_X36": {"groups": []}, }.items(): res = client.post( STUDIES_URL, @@ -1198,14 +1198,14 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ res = client.post( STUDIES_URL, headers={"Authorization": f"Bearer {admin_access_token}"}, - params={"name": "dummy_study_37"}, + params={"name": "dummy_37"}, ) assert res.status_code in CREATE_STATUS_CODES, res.json() study_id = res.json() res = client.post( f"{STUDIES_URL}/{study_id}/variants", headers={"Authorization": f"Bearer {admin_access_token}"}, - params={"name": "study_37"}, + params={"name": "study_X37"}, ) assert res.status_code in CREATE_STATUS_CODES, res.json() study_id = res.json() @@ -1215,11 +1215,11 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ headers={"Authorization": f"Bearer {admin_access_token}"}, ) assert res.status_code == 200, res.json() - studies_ids_mapping["study_37"] = study_id + studies_ids_mapping["study_X37"] = study_id res = client.post( STUDIES_URL, headers={"Authorization": f"Bearer {admin_access_token}"}, - params={"name": "study_38"}, + params={"name": "study_X38"}, ) assert res.status_code in CREATE_STATUS_CODES, res.json() study_id = res.json() @@ -1228,7 +1228,7 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ headers={"Authorization": f"Bearer {admin_access_token}"}, ) assert res.status_code == 200, res.json() - studies_ids_mapping["study_38"] = study_id + studies_ids_mapping["study_X38"] = study_id # verify the studies creation was done correctly and that admin has access to all studies all_studies = set(studies_ids_mapping.values()) @@ -1277,55 +1277,55 @@ def 
test_get_studies__access_permissions(self, client: TestClient, admin_access_ "groups": ["group_1", "group_2"], "public_mode": PublicMode.NONE, }, - "study_10": {"type": "variantstudy", "owner": "user_1", "groups": None, "public_mode": PublicMode.NONE}, - "study_11": {"type": "variantstudy", "owner": "user_2", "groups": None, "public_mode": PublicMode.NONE}, - "study_12": {"type": "variantstudy", "owner": None, "groups": None, "public_mode": PublicMode.NONE}, - "study_13": {"type": "variantstudy", "owner": None, "groups": None, "public_mode": PublicMode.READ}, - "study_14": {"type": "variantstudy", "owner": None, "groups": None, "public_mode": PublicMode.EDIT}, - "study_15": {"type": "variantstudy", "owner": None, "groups": None, "public_mode": PublicMode.EXECUTE}, - "study_16": {"type": "variantstudy", "owner": None, "groups": None, "public_mode": PublicMode.FULL}, - "study_17": {"type": "rawstudy", "owner": "user_1", "groups": ["group_1"], "public_mode": PublicMode.NONE}, - "study_18": {"type": "rawstudy", "owner": "user_1", "groups": ["group_2"], "public_mode": PublicMode.NONE}, - "study_19": {"type": "rawstudy", "owner": None, "groups": ["group_1"], "public_mode": PublicMode.NONE}, - "study_20": {"type": "rawstudy", "owner": "user_2", "groups": ["group_1"], "public_mode": PublicMode.NONE}, - "study_21": {"type": "rawstudy", "owner": "user_2", "groups": ["group_2"], "public_mode": PublicMode.NONE}, - "study_22": {"type": "rawstudy", "owner": None, "groups": ["group_2"], "public_mode": PublicMode.NONE}, - "study_23": { + "study_X10": {"type": "variantstudy", "owner": "user_1", "groups": None, "public_mode": PublicMode.NONE}, + "study_X11": {"type": "variantstudy", "owner": "user_2", "groups": None, "public_mode": PublicMode.NONE}, + "study_X12": {"type": "variantstudy", "owner": None, "groups": None, "public_mode": PublicMode.NONE}, + "study_X13": {"type": "variantstudy", "owner": None, "groups": None, "public_mode": PublicMode.READ}, + "study_X14": {"type": "variantstudy", "owner": None, "groups": None, "public_mode": PublicMode.EDIT}, + "study_X15": {"type": "variantstudy", "owner": None, "groups": None, "public_mode": PublicMode.EXECUTE}, + "study_X16": {"type": "variantstudy", "owner": None, "groups": None, "public_mode": PublicMode.FULL}, + "study_X17": {"type": "rawstudy", "owner": "user_1", "groups": ["group_1"], "public_mode": PublicMode.NONE}, + "study_X18": {"type": "rawstudy", "owner": "user_1", "groups": ["group_2"], "public_mode": PublicMode.NONE}, + "study_X19": {"type": "rawstudy", "owner": None, "groups": ["group_1"], "public_mode": PublicMode.NONE}, + "study_X20": {"type": "rawstudy", "owner": "user_2", "groups": ["group_1"], "public_mode": PublicMode.NONE}, + "study_X21": {"type": "rawstudy", "owner": "user_2", "groups": ["group_2"], "public_mode": PublicMode.NONE}, + "study_X22": {"type": "rawstudy", "owner": None, "groups": ["group_2"], "public_mode": PublicMode.NONE}, + "study_X23": { "type": "rawstudy", "owner": "user_1", "groups": ["group_1", "group_2"], "public_mode": PublicMode.NONE, }, - "study_24": { + "study_X24": { "type": "rawstudy", "owner": "user_2", "groups": ["group_1", "group_2"], "public_mode": PublicMode.NONE, }, - "study_25": { + "study_X25": { "type": "rawstudy", "owner": None, "groups": ["group_1", "group_2"], "public_mode": PublicMode.NONE, }, - "study_26": {"type": "rawstudy", "owner": "user_1", "groups": None, "public_mode": PublicMode.NONE}, - "study_27": {"type": "rawstudy", "owner": "user_2", "groups": None, "public_mode": PublicMode.NONE}, - 
"study_28": {"type": "rawstudy", "owner": None, "groups": None, "public_mode": PublicMode.NONE}, - "study_29": {"type": "rawstudy", "owner": None, "groups": None, "public_mode": PublicMode.READ}, - "study_30": {"type": "rawstudy", "owner": None, "groups": None, "public_mode": PublicMode.EDIT}, - "study_31": {"type": "rawstudy", "owner": None, "groups": None, "public_mode": PublicMode.EXECUTE}, - "study_32": {"type": "rawstudy", "owner": None, "groups": None, "public_mode": PublicMode.FULL}, - "study_33": { + "study_X26": {"type": "rawstudy", "owner": "user_1", "groups": None, "public_mode": PublicMode.NONE}, + "study_X27": {"type": "rawstudy", "owner": "user_2", "groups": None, "public_mode": PublicMode.NONE}, + "study_X28": {"type": "rawstudy", "owner": None, "groups": None, "public_mode": PublicMode.NONE}, + "study_X29": {"type": "rawstudy", "owner": None, "groups": None, "public_mode": PublicMode.READ}, + "study_X30": {"type": "rawstudy", "owner": None, "groups": None, "public_mode": PublicMode.EDIT}, + "study_X31": {"type": "rawstudy", "owner": None, "groups": None, "public_mode": PublicMode.EXECUTE}, + "study_X32": {"type": "rawstudy", "owner": None, "groups": None, "public_mode": PublicMode.FULL}, + "study_X33": { "type": "variantstudy", "owner": "user_3", "groups": ["group_1"], "public_mode": PublicMode.NONE, }, - "study_34": {"type": "rawstudy", "owner": "user_3", "groups": ["group_2"], "public_mode": PublicMode.NONE}, - "study_35": {"type": "variantstudy", "owner": "user_3", "groups": None, "public_mode": PublicMode.NONE}, - "study_36": {"type": "rawstudy", "owner": "user_3", "groups": None, "public_mode": PublicMode.NONE}, - "study_37": {"type": "variantstudy", "owner": None, "groups": ["group_3"], "public_mode": PublicMode.NONE}, - "study_38": {"type": "rawstudy", "owner": None, "groups": ["group_3"], "public_mode": PublicMode.NONE}, + "study_X34": {"type": "rawstudy", "owner": "user_3", "groups": ["group_2"], "public_mode": PublicMode.NONE}, + "study_X35": {"type": "variantstudy", "owner": "user_3", "groups": None, "public_mode": PublicMode.NONE}, + "study_X36": {"type": "rawstudy", "owner": "user_3", "groups": None, "public_mode": PublicMode.NONE}, + "study_X37": {"type": "variantstudy", "owner": None, "groups": ["group_3"], "public_mode": PublicMode.NONE}, + "study_X38": {"type": "rawstudy", "owner": None, "groups": ["group_3"], "public_mode": PublicMode.NONE}, } res = client.get(STUDIES_URL, headers={"Authorization": f"Bearer {admin_access_token}"}) assert res.status_code == LIST_STATUS_CODE, res.json() @@ -1375,9 +1375,10 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ ] for request_groups_numbers, expected_studies_numbers in requests_params_expected_studies: request_groups_ids = [groups_ids[f"group_{group_number}"] for group_number in request_groups_numbers] - expected_studies = { - studies_ids_mapping[f"study_{study_number}"] for study_number in expected_studies_numbers - } + expected_studies = [ + studies_ids_mapping[f"study_{(study_number if int(study_number) <= 9 else 'X'+study_number)}"] + for study_number in expected_studies_numbers + ] res = client.get( STUDIES_URL, headers={"Authorization": f"Bearer {users_tokens['user_1']}"}, @@ -1385,7 +1386,7 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ ) assert res.status_code == LIST_STATUS_CODE, res.json() study_map = res.json() - assert not expected_studies.difference(set(study_map)) + assert not set(expected_studies).difference(set(study_map)) assert 
not all_studies.difference(expected_studies).intersection(set(study_map)) # test pagination res = client.get( @@ -1397,6 +1398,7 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ ) assert res.status_code == LIST_STATUS_CODE, res.json() assert len(res.json()) == max(0, min(2, len(expected_studies) - 2)) + # assert list(res.json()) == expected_studies[2:4] # user_2 access requests_params_expected_studies = [ @@ -1418,7 +1420,8 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ for request_groups_numbers, expected_studies_numbers in requests_params_expected_studies: request_groups_ids = [groups_ids[f"group_{group_number}"] for group_number in request_groups_numbers] expected_studies = { - studies_ids_mapping[f"study_{study_number}"] for study_number in expected_studies_numbers + studies_ids_mapping[f"study_{(study_number if int(study_number) <= 9 else 'X'+study_number)}"] + for study_number in expected_studies_numbers } res = client.get( STUDIES_URL, @@ -1444,7 +1447,8 @@ def test_get_studies__access_permissions(self, client: TestClient, admin_access_ for request_groups_numbers, expected_studies_numbers in requests_params_expected_studies: request_groups_ids = [groups_ids[f"group_{group_number}"] for group_number in request_groups_numbers] expected_studies = { - studies_ids_mapping[f"study_{study_number}"] for study_number in expected_studies_numbers + studies_ids_mapping[f"study_{(study_number if int(study_number) <= 9 else 'X'+study_number)}"] + for study_number in expected_studies_numbers } res = client.get( STUDIES_URL, diff --git a/tests/study/test_repository.py b/tests/study/test_repository.py index e6becac349..f7314cdaaa 100644 --- a/tests/study/test_repository.py +++ b/tests/study/test_repository.py @@ -128,14 +128,14 @@ def test_get_all__incompatible_case( icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) - study_1 = VariantStudy(id=1) - study_2 = VariantStudy(id=2) - study_3 = VariantStudy(id=3) - study_4 = VariantStudy(id=4) - study_5 = RawStudy(id=5, missing=datetime.datetime.now(), workspace=DEFAULT_WORKSPACE_NAME) - study_6 = RawStudy(id=6, missing=datetime.datetime.now(), workspace=test_workspace) - study_7 = RawStudy(id=7, missing=None, workspace=test_workspace) - study_8 = RawStudy(id=8, missing=None, workspace=DEFAULT_WORKSPACE_NAME) + study_1 = VariantStudy(id=1, name="study-1") + study_2 = VariantStudy(id=2, name="study-2") + study_3 = VariantStudy(id=3, name="study-3") + study_4 = VariantStudy(id=4, name="study-4") + study_5 = RawStudy(id=5, name="study-5", missing=datetime.datetime.now(), workspace=DEFAULT_WORKSPACE_NAME) + study_6 = RawStudy(id=6, name="study-6", missing=datetime.datetime.now(), workspace=test_workspace) + study_7 = RawStudy(id=7, name="study-7", missing=None, workspace=test_workspace) + study_8 = RawStudy(id=8, name="study-8", missing=None, workspace=DEFAULT_WORKSPACE_NAME) db_session.add_all([study_1, study_2, study_3, study_4, study_5, study_6, study_7, study_8]) db_session.commit() @@ -179,21 +179,21 @@ def test_get_all__incompatible_case( @pytest.mark.parametrize( "name, expected_ids", [ - ("", {"1", "2", "3", "4", "5", "6", "7", "8"}), - ("specie", {"1", "2", "3", "4", "5", "6", "7", "8"}), - ("prefix-specie", {"2", "3", "6", "7"}), - ("variant", {"1", "2", "3", "4"}), - ("variant-suffix", {"3", "4"}), - ("raw", {"5", "6", "7", "8"}), - ("raw-suffix", {"7", "8"}), - ("prefix-variant", set()), - ("specie-suffix", set()), 
+ ("", ["1", "2", "3", "4", "5", "6", "7", "8"]), + ("specie", ["1", "2", "3", "4", "5", "6", "7", "8"]), + ("prefix-specie", ["2", "3", "6", "7"]), + ("variant", ["1", "2", "3", "4"]), + ("variant-suffix", ["3", "4"]), + ("raw", ["5", "6", "7", "8"]), + ("raw-suffix", ["7", "8"]), + ("prefix-variant", []), + ("specie-suffix", []), ], ) def test_get_all__study_name_filter( db_session: Session, name: str, - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) @@ -207,6 +207,10 @@ def test_get_all__study_name_filter( study_7 = RawStudy(id=7, name="prefix-specie-raw-suffix") study_8 = RawStudy(id=8, name="specie-raw-suffix") + mapping_ids_names = { + str(s.id): s.name for s in [study_1, study_2, study_3, study_4, study_5, study_6, study_7, study_8] + } + db_session.add_all([study_1, study_2, study_3, study_4, study_5, study_6, study_7, study_8]) db_session.commit() @@ -225,7 +229,7 @@ def test_get_all__study_name_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) if expected_ids is not None: - assert {s.id for s in all_studies} == expected_ids + assert sorted(s.id for s in all_studies) == expected_ids # test pagination with DBStatementRecorder(db_session.bind) as db_recorder: @@ -234,34 +238,36 @@ def test_get_all__study_name_filter( pagination=StudyPagination(page_nb=1, page_size=2), ) assert len(all_studies) == max(0, min(len(expected_ids) - 2, 2)) + name_sorted_expected_studies = sorted(expected_ids, key=lambda s_id: mapping_ids_names[s_id]) + assert sorted(s.id for s in all_studies) == name_sorted_expected_studies[2:4] assert len(db_recorder.sql_statements) == 1, str(db_recorder) @pytest.mark.parametrize( "managed, expected_ids", [ - (None, {"1", "2", "3", "4", "5", "6", "7", "8"}), - (True, {"1", "2", "3", "4", "5", "8"}), - (False, {"6", "7"}), + (None, ["1", "2", "3", "4", "5", "6", "7", "8"]), + (True, ["1", "2", "3", "4", "5", "8"]), + (False, ["6", "7"]), ], ) def test_get_all__managed_study_filter( db_session: Session, managed: t.Optional[bool], - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: test_workspace = "test-workspace" icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) - study_1 = VariantStudy(id=1) - study_2 = VariantStudy(id=2) - study_3 = VariantStudy(id=3) - study_4 = VariantStudy(id=4) - study_5 = RawStudy(id=5, workspace=DEFAULT_WORKSPACE_NAME) - study_6 = RawStudy(id=6, workspace=test_workspace) - study_7 = RawStudy(id=7, workspace=test_workspace) - study_8 = RawStudy(id=8, workspace=DEFAULT_WORKSPACE_NAME) + study_1 = VariantStudy(id=1, name="study-1") + study_2 = VariantStudy(id=2, name="study-2") + study_3 = VariantStudy(id=3, name="study-3") + study_4 = VariantStudy(id=4, name="study-4") + study_5 = RawStudy(id=5, name="study-5", workspace=DEFAULT_WORKSPACE_NAME) + study_6 = RawStudy(id=6, name="study-6", workspace=test_workspace) + study_7 = RawStudy(id=7, name="study-7", workspace=test_workspace) + study_8 = RawStudy(id=8, name="study-8", workspace=DEFAULT_WORKSPACE_NAME) db_session.add_all([study_1, study_2, study_3, study_4, study_5, study_6, study_7, study_8]) db_session.commit() @@ -281,7 +287,7 @@ def test_get_all__managed_study_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) if expected_ids is not None: - assert {s.id for s in all_studies} == expected_ids + assert sorted(s.id for s in all_studies) == expected_ids 
# test pagination with DBStatementRecorder(db_session.bind) as db_recorder: @@ -290,29 +296,30 @@ def test_get_all__managed_study_filter( pagination=StudyPagination(page_nb=1, page_size=2), ) assert len(all_studies) == max(0, min(len(expected_ids) - 2, 2)) + assert sorted(s.id for s in all_studies) == expected_ids[2:4] assert len(db_recorder.sql_statements) == 1, str(db_recorder) @pytest.mark.parametrize( "archived, expected_ids", [ - (None, {"1", "2", "3", "4"}), - (True, {"1", "3"}), - (False, {"2", "4"}), + (None, ["1", "2", "3", "4"]), + (True, ["1", "3"]), + (False, ["2", "4"]), ], ) def test_get_all__archived_study_filter( db_session: Session, archived: t.Optional[bool], - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) - study_1 = VariantStudy(id=1, archived=True) - study_2 = VariantStudy(id=2, archived=False) - study_3 = RawStudy(id=3, archived=True) - study_4 = RawStudy(id=4, archived=False) + study_1 = VariantStudy(id=1, name="study-1", archived=True) + study_2 = VariantStudy(id=2, name="study-2", archived=False) + study_3 = RawStudy(id=3, name="study-3", archived=True) + study_4 = RawStudy(id=4, name="study-4", archived=False) db_session.add_all([study_1, study_2, study_3, study_4]) db_session.commit() @@ -331,7 +338,7 @@ def test_get_all__archived_study_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) if expected_ids is not None: - assert {s.id for s in all_studies} == expected_ids + assert sorted(s.id for s in all_studies) == expected_ids # test pagination with DBStatementRecorder(db_session.bind) as db_recorder: @@ -346,23 +353,23 @@ def test_get_all__archived_study_filter( @pytest.mark.parametrize( "variant, expected_ids", [ - (None, {"1", "2", "3", "4"}), - (True, {"1", "2"}), - (False, {"3", "4"}), + (None, ["1", "2", "3", "4"]), + (True, ["1", "2"]), + (False, ["3", "4"]), ], ) def test_get_all__variant_study_filter( db_session: Session, variant: t.Optional[bool], - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) - study_1 = VariantStudy(id=1) - study_2 = VariantStudy(id=2) - study_3 = RawStudy(id=3) - study_4 = RawStudy(id=4) + study_1 = VariantStudy(id=1, name="study-1") + study_2 = VariantStudy(id=2, name="study-2") + study_3 = RawStudy(id=3, name="study-3") + study_4 = RawStudy(id=4, name="study-4") db_session.add_all([study_1, study_2, study_3, study_4]) db_session.commit() @@ -381,7 +388,7 @@ def test_get_all__variant_study_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) if expected_ids is not None: - assert {s.id for s in all_studies} == expected_ids + assert sorted(s.id for s in all_studies) == expected_ids # test pagination with DBStatementRecorder(db_session.bind) as db_recorder: @@ -396,25 +403,25 @@ def test_get_all__variant_study_filter( @pytest.mark.parametrize( "versions, expected_ids", [ - ([], {"1", "2", "3", "4"}), - (["1", "2"], {"1", "2", "3", "4"}), - (["1"], {"1", "3"}), - (["2"], {"2", "4"}), - (["3"], set()), + ([], ["1", "2", "3", "4"]), + (["1", "2"], ["1", "2", "3", "4"]), + (["1"], ["1", "3"]), + (["2"], ["2", "4"]), + (["3"], []), ], ) def test_get_all__study_version_filter( db_session: Session, versions: t.Sequence[str], - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: icache: Mock = Mock(spec=ICache) repository = 
StudyMetadataRepository(cache_service=icache, session=db_session) - study_1 = VariantStudy(id=1, version="1") - study_2 = VariantStudy(id=2, version="2") - study_3 = RawStudy(id=3, version="1") - study_4 = RawStudy(id=4, version="2") + study_1 = VariantStudy(id=1, name="study-1", version="1") + study_2 = VariantStudy(id=2, name="study-2", version="2") + study_3 = RawStudy(id=3, name="study-3", version="1") + study_4 = RawStudy(id=4, name="study-4", version="2") db_session.add_all([study_1, study_2, study_3, study_4]) db_session.commit() @@ -433,7 +440,7 @@ def test_get_all__study_version_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) if expected_ids is not None: - assert {s.id for s in all_studies} == expected_ids + assert sorted(s.id for s in all_studies) == expected_ids # test pagination with DBStatementRecorder(db_session.bind) as db_recorder: @@ -448,17 +455,17 @@ def test_get_all__study_version_filter( @pytest.mark.parametrize( "users, expected_ids", [ - ([], {"1", "2", "3", "4"}), - (["1000", "2000"], {"1", "2", "3", "4"}), - (["1000"], {"1", "3"}), - (["2000"], {"2", "4"}), - (["3000"], set()), + ([], ["1", "2", "3", "4"]), + (["1000", "2000"], ["1", "2", "3", "4"]), + (["1000"], ["1", "3"]), + (["2000"], ["2", "4"]), + (["3000"], []), ], ) def test_get_all__study_users_filter( db_session: Session, users: t.Sequence["int"], - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) @@ -466,10 +473,10 @@ def test_get_all__study_users_filter( test_user_1 = User(id=1000) test_user_2 = User(id=2000) - study_1 = VariantStudy(id=1, owner=test_user_1) - study_2 = VariantStudy(id=2, owner=test_user_2) - study_3 = RawStudy(id=3, owner=test_user_1) - study_4 = RawStudy(id=4, owner=test_user_2) + study_1 = VariantStudy(id=1, name="study-1", owner=test_user_1) + study_2 = VariantStudy(id=2, name="study-2", owner=test_user_2) + study_3 = RawStudy(id=3, name="study-3", owner=test_user_1) + study_4 = RawStudy(id=4, name="study-4", owner=test_user_2) db_session.add_all([test_user_1, test_user_2]) db_session.commit() @@ -492,7 +499,7 @@ def test_get_all__study_users_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) if expected_ids is not None: - assert {s.id for s in all_studies} == expected_ids + assert sorted(s.id for s in all_studies) == expected_ids # test pagination with DBStatementRecorder(db_session.bind) as db_recorder: @@ -501,23 +508,24 @@ def test_get_all__study_users_filter( pagination=StudyPagination(page_nb=1, page_size=2), ) assert len(all_studies) == max(0, min(len(expected_ids) - 2, 2)) + assert sorted(s.id for s in all_studies) == expected_ids[2:4] assert len(db_recorder.sql_statements) == 1, str(db_recorder) @pytest.mark.parametrize( "groups, expected_ids", [ - ([], {"1", "2", "3", "4"}), - (["1000", "2000"], {"1", "2", "3", "4"}), - (["1000"], {"1", "2", "4"}), - (["2000"], {"2", "3"}), - (["3000"], set()), + ([], ["1", "2", "3", "4"]), + (["1000", "2000"], ["1", "2", "3", "4"]), + (["1000"], ["1", "2", "4"]), + (["2000"], ["2", "3"]), + (["3000"], []), ], ) def test_get_all__study_groups_filter( db_session: Session, groups: t.Sequence[str], - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) @@ -525,10 +533,10 @@ def test_get_all__study_groups_filter( test_group_1 = Group(id=1000) 
test_group_2 = Group(id=2000) - study_1 = VariantStudy(id=1, groups=[test_group_1]) - study_2 = VariantStudy(id=2, groups=[test_group_1, test_group_2]) - study_3 = RawStudy(id=3, groups=[test_group_2]) - study_4 = RawStudy(id=4, groups=[test_group_1]) + study_1 = VariantStudy(id=1, name="study-1", groups=[test_group_1]) + study_2 = VariantStudy(id=2, name="study-2", groups=[test_group_1, test_group_2]) + study_3 = RawStudy(id=3, name="study-3", groups=[test_group_2]) + study_4 = RawStudy(id=4, name="study-4", groups=[test_group_1]) db_session.add_all([test_group_1, test_group_2]) db_session.commit() @@ -551,7 +559,7 @@ def test_get_all__study_groups_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) if expected_ids is not None: - assert {s.id for s in all_studies} == expected_ids + assert sorted(s.id for s in all_studies) == expected_ids # test pagination with DBStatementRecorder(db_session.bind) as db_recorder: @@ -560,32 +568,33 @@ def test_get_all__study_groups_filter( pagination=StudyPagination(page_nb=1, page_size=2), ) assert len(all_studies) == max(0, min(len(expected_ids) - 2, 2)) + assert sorted(s.id for s in all_studies) == expected_ids[2:4] assert len(db_recorder.sql_statements) == 1, str(db_recorder) @pytest.mark.parametrize( "study_ids, expected_ids", [ - ([], {"1", "2", "3", "4"}), - (["1", "2", "3", "4"], {"1", "2", "3", "4"}), - (["1", "2", "4"], {"1", "2", "4"}), - (["2", "3"], {"2", "3"}), - (["2"], {"2"}), - (["3000"], set()), + ([], ["1", "2", "3", "4"]), + (["1", "2", "3", "4"], ["1", "2", "3", "4"]), + (["1", "2", "4"], ["1", "2", "4"]), + (["2", "3"], ["2", "3"]), + (["2"], ["2"]), + (["3000"], []), ], ) def test_get_all__study_ids_filter( db_session: Session, study_ids: t.Sequence[str], - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) - study_1 = VariantStudy(id=1) - study_2 = VariantStudy(id=2) - study_3 = RawStudy(id=3) - study_4 = RawStudy(id=4) + study_1 = VariantStudy(id=1, name="study-1") + study_2 = VariantStudy(id=2, name="study-2") + study_3 = RawStudy(id=3, name="study-3") + study_4 = RawStudy(id=4, name="study-4") db_session.add_all([study_1, study_2, study_3, study_4]) db_session.commit() @@ -605,7 +614,7 @@ def test_get_all__study_ids_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) if expected_ids is not None: - assert {s.id for s in all_studies} == expected_ids + assert sorted(s.id for s in all_studies) == expected_ids # test pagination with DBStatementRecorder(db_session.bind) as db_recorder: @@ -614,29 +623,30 @@ def test_get_all__study_ids_filter( pagination=StudyPagination(page_nb=1, page_size=2), ) assert len(all_studies) == max(0, min(len(expected_ids) - 2, 2)) + assert sorted(s.id for s in all_studies) == expected_ids[2:4] assert len(db_recorder.sql_statements) == 1, str(db_recorder) @pytest.mark.parametrize( "exists, expected_ids", [ - (None, {"1", "2", "3", "4"}), - (True, {"1", "2", "4"}), - (False, {"3"}), + (None, ["1", "2", "3", "4"]), + (True, ["1", "2", "4"]), + (False, ["3"]), ], ) def test_get_all__study_existence_filter( db_session: Session, exists: t.Optional[bool], - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) - study_1 = VariantStudy(id=1) - study_2 = VariantStudy(id=2) - study_3 = RawStudy(id=3, missing=datetime.datetime.now()) - 
study_4 = RawStudy(id=4) + study_1 = VariantStudy(id=1, name="study-1") + study_2 = VariantStudy(id=2, name="study-2") + study_3 = RawStudy(id=3, name="study-3", missing=datetime.datetime.now()) + study_4 = RawStudy(id=4, name="study-4") db_session.add_all([study_1, study_2, study_3, study_4]) db_session.commit() @@ -656,7 +666,7 @@ def test_get_all__study_existence_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) if expected_ids is not None: - assert {s.id for s in all_studies} == expected_ids + assert sorted(s.id for s in all_studies) == expected_ids # test pagination with DBStatementRecorder(db_session.bind) as db_recorder: @@ -665,30 +675,31 @@ def test_get_all__study_existence_filter( pagination=StudyPagination(page_nb=1, page_size=2), ) assert len(all_studies) == max(0, min(len(expected_ids) - 2, 2)) + assert sorted(s.id for s in all_studies) == expected_ids[2:4] assert len(db_recorder.sql_statements) == 1, str(db_recorder) @pytest.mark.parametrize( "workspace, expected_ids", [ - ("", {"1", "2", "3", "4"}), - ("workspace-1", {"3"}), - ("workspace-2", {"4"}), - ("workspace-3", set()), + ("", ["1", "2", "3", "4"]), + ("workspace-1", ["3"]), + ("workspace-2", ["4"]), + ("workspace-3", []), ], ) def test_get_all__study_workspace_filter( db_session: Session, workspace: str, - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) - study_1 = VariantStudy(id=1) - study_2 = VariantStudy(id=2) - study_3 = RawStudy(id=3, workspace="workspace-1") - study_4 = RawStudy(id=4, workspace="workspace-2") + study_1 = VariantStudy(id=1, name="study-1") + study_2 = VariantStudy(id=2, name="study-2") + study_3 = RawStudy(id=3, name="study-3", workspace="workspace-1") + study_4 = RawStudy(id=4, name="study-4", workspace="workspace-2") db_session.add_all([study_1, study_2, study_3, study_4]) db_session.commit() @@ -708,7 +719,7 @@ def test_get_all__study_workspace_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) if expected_ids is not None: - assert {s.id for s in all_studies} == expected_ids + assert sorted(s.id for s in all_studies) == expected_ids # test pagination with DBStatementRecorder(db_session.bind) as db_recorder: @@ -717,32 +728,33 @@ def test_get_all__study_workspace_filter( pagination=StudyPagination(page_nb=1, page_size=2), ) assert len(all_studies) == max(0, min(len(expected_ids) - 2, 2)) + assert sorted(s.id for s in all_studies) == expected_ids[2:4] assert len(db_recorder.sql_statements) == 1, str(db_recorder) @pytest.mark.parametrize( "folder, expected_ids", [ - ("", {"1", "2", "3", "4"}), - ("/home/folder-", {"1", "2", "3", "4"}), - ("/home/folder-1", {"1", "3"}), - ("/home/folder-2", {"2", "4"}), - ("/home/folder-3", set()), - ("folder-1", set()), + ("", ["1", "2", "3", "4"]), + ("/home/folder-", ["1", "2", "3", "4"]), + ("/home/folder-1", ["1", "3"]), + ("/home/folder-2", ["2", "4"]), + ("/home/folder-3", []), + ("folder-1", []), ], ) def test_get_all__study_folder_filter( db_session: Session, folder: str, - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) - study_1 = VariantStudy(id=1, folder="/home/folder-1") - study_2 = VariantStudy(id=2, folder="/home/folder-2") - study_3 = RawStudy(id=3, folder="/home/folder-1") - study_4 = RawStudy(id=4, folder="/home/folder-2") + study_1 = 
VariantStudy(id=1, name="study-1", folder="/home/folder-1") + study_2 = VariantStudy(id=2, name="study-2", folder="/home/folder-2") + study_3 = RawStudy(id=3, name="study-3", folder="/home/folder-1") + study_4 = RawStudy(id=4, name="study-4", folder="/home/folder-2") db_session.add_all([study_1, study_2, study_3, study_4]) db_session.commit() @@ -762,7 +774,7 @@ def test_get_all__study_folder_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) if expected_ids is not None: - assert {s.id for s in all_studies} == expected_ids + assert sorted(s.id for s in all_studies) == expected_ids # test pagination with DBStatementRecorder(db_session.bind) as db_recorder: @@ -774,22 +786,20 @@ def test_get_all__study_folder_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) -# TODO fix this test and all the others -@pytest.mark.skip(reason="This bug is to be fixed asap, the sql query is not working as expected") @pytest.mark.parametrize( "tags, expected_ids", [ - ([], {"1", "2", "3", "4", "5", "6", "7", "8"}), - (["decennial"], {"2", "4", "6", "8"}), - (["winter_transition"], {"3", "4", "7", "8"}), - (["decennial", "winter_transition"], {"2", "3", "4", "6", "7", "8"}), - (["no-study-tag"], set()), + ([], ["1", "2", "3", "4", "5", "6", "7", "8"]), + (["decennial"], ["2", "4", "6", "8"]), + (["winter_transition"], ["3", "4", "7", "8"]), + (["decennial", "winter_transition"], ["2", "3", "4", "6", "7", "8"]), + (["no-study-tag"], []), ], ) def test_get_all__study_tags_filter( db_session: Session, tags: t.Sequence[str], - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) @@ -798,14 +808,14 @@ def test_get_all__study_tags_filter( test_tag_2 = Tag(label="decennial") test_tag_3 = Tag(label="Winter_Transition") # note the different case - study_1 = VariantStudy(id=1, tags=[test_tag_1]) - study_2 = VariantStudy(id=2, tags=[test_tag_2]) - study_3 = VariantStudy(id=3, tags=[test_tag_3]) - study_4 = VariantStudy(id=4, tags=[test_tag_2, test_tag_3]) - study_5 = RawStudy(id=5, tags=[test_tag_1]) - study_6 = RawStudy(id=6, tags=[test_tag_2]) - study_7 = RawStudy(id=7, tags=[test_tag_3]) - study_8 = RawStudy(id=8, tags=[test_tag_2, test_tag_3]) + study_1 = VariantStudy(id=1, name="study-1", tags=[test_tag_1]) + study_2 = VariantStudy(id=2, name="study-2", tags=[test_tag_2]) + study_3 = VariantStudy(id=3, name="study-3", tags=[test_tag_3]) + study_4 = VariantStudy(id=4, name="study-4", tags=[test_tag_2, test_tag_3]) + study_5 = RawStudy(id=5, name="study-5", tags=[test_tag_1]) + study_6 = RawStudy(id=6, name="study-6", tags=[test_tag_2]) + study_7 = RawStudy(id=7, name="study-7", tags=[test_tag_3]) + study_8 = RawStudy(id=8, name="study-8", tags=[test_tag_2, test_tag_3]) db_session.add_all([study_1, study_2, study_3, study_4, study_5, study_6, study_7, study_8]) db_session.commit() @@ -826,7 +836,7 @@ def test_get_all__study_tags_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) if expected_ids is not None: - assert {s.id for s in all_studies} == expected_ids + assert sorted(s.id for s in all_studies) == expected_ids # test pagination with DBStatementRecorder(db_session.bind) as db_recorder: @@ -835,6 +845,7 @@ def test_get_all__study_tags_filter( pagination=StudyPagination(page_nb=1, page_size=2), ) assert len(all_studies) == max(0, min(len(expected_ids) - 2, 2)) + assert sorted(s.id for s in all_studies) == expected_ids[2:4] assert 
len(db_recorder.sql_statements) == 1, str(db_recorder) @@ -842,42 +853,42 @@ def test_get_all__study_tags_filter( "user_id, study_groups, expected_ids", [ # fmt: off - (101, [], {"1", "2", "5", "6", "7", "8", "9", "10", "13", "14", "15", "16", "17", "18", - "21", "22", "23", "24", "25", "26", "29", "30", "31", "32", "34"}), - (101, ["101"], {"1", "7", "8", "9", "17", "23", "24", "25"}), - (101, ["102"], {"2", "5", "6", "7", "8", "9", "18", "21", "22", "23", "24", "25", "34"}), - (101, ["103"], set()), - (101, ["101", "102"], {"1", "2", "5", "6", "7", "8", "9", "17", "18", "21", "22", "23", "24", "25", "34"}), - (101, ["101", "103"], {"1", "7", "8", "9", "17", "23", "24", "25"}), - (101, ["102", "103"], {"2", "5", "6", "7", "8", "9", "18", "21", "22", "23", "24", "25", "34"}), - (101, ["101", "102", "103"], {"1", "2", "5", "6", "7", "8", "9", "17", "18", "21", "22", - "23", "24", "25", "34"}), - (102, [], {"1", "3", "4", "5", "7", "8", "9", "11", "13", "14", "15", "16", "17", "19", - "20", "21", "23", "24", "25", "27", "29", "30", "31", "32", "33"}), - (102, ["101"], {"1", "3", "4", "7", "8", "9", "17", "19", "20", "23", "24", "25", "33"}), - (102, ["102"], {"5", "7", "8", "9", "21", "23", "24", "25"}), - (102, ["103"], set()), - (102, ["101", "102"], {"1", "3", "4", "5", "7", "8", "9", "17", "19", "20", "21", "23", "24", "25", "33"}), - (102, ["101", "103"], {"1", "3", "4", "7", "8", "9", "17", "19", "20", "23", "24", "25", "33"}), - (102, ["102", "103"], {"5", "7", "8", "9", "21", "23", "24", "25"}), - (102, ["101", "102", "103"], {"1", "3", "4", "5", "7", "8", "9", "17", "19", "20", "21", - "23", "24", "25", "33"}), - (103, [], {"13", "14", "15", "16", "29", "30", "31", "32", "33", "34", "35", "36"}), - (103, ["101"], {"33"}), - (103, ["102"], {"34"}), - (103, ["103"], set()), - (103, ["101", "102"], {"33", "34"}), - (103, ["101", "103"], {"33"}), - (103, ["102", "103"], {"34"}), - (103, ["101", "102", "103"], {"33", "34"}), - (None, [], set()), - (None, ["101"], set()), - (None, ["102"], set()), - (None, ["103"], set()), - (None, ["101", "102"], set()), - (None, ["101", "103"], set()), - (None, ["102", "103"], set()), - (None, ["101", "102", "103"], set()), + (101, [], ["1", "2", "5", "6", "7", "8", "9", "10", "13", "14", "15", "16", "17", "18", + "21", "22", "23", "24", "25", "26", "29", "30", "31", "32", "34"]), + (101, ["101"], ["1", "7", "8", "9", "17", "23", "24", "25"]), + (101, ["102"], ["2", "5", "6", "7", "8", "9", "18", "21", "22", "23", "24", "25", "34"]), + (101, ["103"], []), + (101, ["101", "102"], ["1", "2", "5", "6", "7", "8", "9", "17", "18", "21", "22", "23", "24", "25", "34"]), + (101, ["101", "103"], ["1", "7", "8", "9", "17", "23", "24", "25"]), + (101, ["102", "103"], ["2", "5", "6", "7", "8", "9", "18", "21", "22", "23", "24", "25", "34"]), + (101, ["101", "102", "103"], ["1", "2", "5", "6", "7", "8", "9", "17", "18", "21", "22", + "23", "24", "25", "34"]), + (102, [], ["1", "3", "4", "5", "7", "8", "9", "11", "13", "14", "15", "16", "17", "19", + "20", "21", "23", "24", "25", "27", "29", "30", "31", "32", "33"]), + (102, ["101"], ["1", "3", "4", "7", "8", "9", "17", "19", "20", "23", "24", "25", "33"]), + (102, ["102"], ["5", "7", "8", "9", "21", "23", "24", "25"]), + (102, ["103"], []), + (102, ["101", "102"], ["1", "3", "4", "5", "7", "8", "9", "17", "19", "20", "21", "23", "24", "25", "33"]), + (102, ["101", "103"], ["1", "3", "4", "7", "8", "9", "17", "19", "20", "23", "24", "25", "33"]), + (102, ["102", "103"], ["5", "7", "8", "9", "21", "23", "24", "25"]), 
+ (102, ["101", "102", "103"], ["1", "3", "4", "5", "7", "8", "9", "17", "19", "20", "21", + "23", "24", "25", "33"]), + (103, [], ["13", "14", "15", "16", "29", "30", "31", "32", "33", "34", "35", "36"]), + (103, ["101"], ["33"]), + (103, ["102"], ["34"]), + (103, ["103"], []), + (103, ["101", "102"], ["33", "34"]), + (103, ["101", "103"], ["33"]), + (103, ["102", "103"], ["34"]), + (103, ["101", "102", "103"], ["33", "34"]), + (None, [], []), + (None, ["101"], []), + (None, ["102"], []), + (None, ["103"], []), + (None, ["101", "102"], []), + (None, ["101", "103"], []), + (None, ["102", "103"], []), + (None, ["101", "102", "103"], []), # fmt: on ], ) @@ -885,7 +896,7 @@ def test_get_all__non_admin_permissions_filter( db_session: Session, user_id: t.Optional[int], study_groups: t.Sequence[str], - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) @@ -901,54 +912,54 @@ def test_get_all__non_admin_permissions_filter( user_groups_mapping = {101: [group_2.id], 102: [group_1.id], 103: []} # create variant studies for user_1 and user_2 that are part of some groups - study_1 = VariantStudy(id=1, owner=user_1, groups=[group_1]) - study_2 = VariantStudy(id=2, owner=user_1, groups=[group_2]) - study_3 = VariantStudy(id=3, groups=[group_1]) - study_4 = VariantStudy(id=4, owner=user_2, groups=[group_1]) - study_5 = VariantStudy(id=5, owner=user_2, groups=[group_2]) - study_6 = VariantStudy(id=6, groups=[group_2]) - study_7 = VariantStudy(id=7, owner=user_1, groups=[group_1, group_2]) - study_8 = VariantStudy(id=8, owner=user_2, groups=[group_1, group_2]) - study_9 = VariantStudy(id=9, groups=[group_1, group_2]) - study_10 = VariantStudy(id=10, owner=user_1) - study_11 = VariantStudy(id=11, owner=user_2) + study_1 = VariantStudy(id=1, name="study-1", owner=user_1, groups=[group_1]) + study_2 = VariantStudy(id=2, name="study-2", owner=user_1, groups=[group_2]) + study_3 = VariantStudy(id=3, name="study-3", groups=[group_1]) + study_4 = VariantStudy(id=4, name="study-4", owner=user_2, groups=[group_1]) + study_5 = VariantStudy(id=5, name="study-5", owner=user_2, groups=[group_2]) + study_6 = VariantStudy(id=6, name="study-6", groups=[group_2]) + study_7 = VariantStudy(id=7, name="study-7", owner=user_1, groups=[group_1, group_2]) + study_8 = VariantStudy(id=8, name="study-8", owner=user_2, groups=[group_1, group_2]) + study_9 = VariantStudy(id=9, name="study-9", groups=[group_1, group_2]) + study_10 = VariantStudy(id=10, name="study-X10", owner=user_1) + study_11 = VariantStudy(id=11, name="study-X11", owner=user_2) # create variant studies with neither owner nor groups - study_12 = VariantStudy(id=12) - study_13 = VariantStudy(id=13, public_mode=PublicMode.READ) - study_14 = VariantStudy(id=14, public_mode=PublicMode.EDIT) - study_15 = VariantStudy(id=15, public_mode=PublicMode.EXECUTE) - study_16 = VariantStudy(id=16, public_mode=PublicMode.FULL) + study_12 = VariantStudy(id=12, name="study-X12") + study_13 = VariantStudy(id=13, name="study-X13", public_mode=PublicMode.READ) + study_14 = VariantStudy(id=14, name="study-X14", public_mode=PublicMode.EDIT) + study_15 = VariantStudy(id=15, name="study-X15", public_mode=PublicMode.EXECUTE) + study_16 = VariantStudy(id=16, name="study-X16", public_mode=PublicMode.FULL) # create raw studies for user_1 and user_2 that are part of some groups - study_17 = RawStudy(id=17, owner=user_1, groups=[group_1]) - study_18 = RawStudy(id=18, 
owner=user_1, groups=[group_2]) - study_19 = RawStudy(id=19, groups=[group_1]) - study_20 = RawStudy(id=20, owner=user_2, groups=[group_1]) - study_21 = RawStudy(id=21, owner=user_2, groups=[group_2]) - study_22 = RawStudy(id=22, groups=[group_2]) - study_23 = RawStudy(id=23, owner=user_1, groups=[group_1, group_2]) - study_24 = RawStudy(id=24, owner=user_2, groups=[group_1, group_2]) - study_25 = RawStudy(id=25, groups=[group_1, group_2]) - study_26 = RawStudy(id=26, owner=user_1) - study_27 = RawStudy(id=27, owner=user_2) + study_17 = RawStudy(id=17, name="study-X17", owner=user_1, groups=[group_1]) + study_18 = RawStudy(id=18, name="study-X18", owner=user_1, groups=[group_2]) + study_19 = RawStudy(id=19, name="study-X19", groups=[group_1]) + study_20 = RawStudy(id=20, name="study-X20", owner=user_2, groups=[group_1]) + study_21 = RawStudy(id=21, name="study-X21", owner=user_2, groups=[group_2]) + study_22 = RawStudy(id=22, name="study-X22", groups=[group_2]) + study_23 = RawStudy(id=23, name="study-X23", owner=user_1, groups=[group_1, group_2]) + study_24 = RawStudy(id=24, name="study-X24", owner=user_2, groups=[group_1, group_2]) + study_25 = RawStudy(id=25, name="study-X25", groups=[group_1, group_2]) + study_26 = RawStudy(id=26, name="study-X26", owner=user_1) + study_27 = RawStudy(id=27, name="study-X27", owner=user_2) # create raw studies with neither owner nor groups - study_28 = RawStudy(id=28) - study_29 = RawStudy(id=29, public_mode=PublicMode.READ) - study_30 = RawStudy(id=30, public_mode=PublicMode.EDIT) - study_31 = RawStudy(id=31, public_mode=PublicMode.EXECUTE) - study_32 = RawStudy(id=32, public_mode=PublicMode.FULL) + study_28 = RawStudy(id=28, name="study-X28") + study_29 = RawStudy(id=29, name="study-X29", public_mode=PublicMode.READ) + study_30 = RawStudy(id=30, name="study-X30", public_mode=PublicMode.EDIT) + study_31 = RawStudy(id=31, name="study-X31", public_mode=PublicMode.EXECUTE) + study_32 = RawStudy(id=32, name="study-X32", public_mode=PublicMode.FULL) # create studies for user_3 that is not part of any group - study_33 = VariantStudy(id=33, owner=user_3, groups=[group_1]) - study_34 = RawStudy(id=34, owner=user_3, groups=[group_2]) - study_35 = VariantStudy(id=35, owner=user_3) - study_36 = RawStudy(id=36, owner=user_3) + study_33 = VariantStudy(id=33, name="study-X33", owner=user_3, groups=[group_1]) + study_34 = RawStudy(id=34, name="study-X34", owner=user_3, groups=[group_2]) + study_35 = VariantStudy(id=35, name="study-X35", owner=user_3) + study_36 = RawStudy(id=36, name="study-X36", owner=user_3) # create studies for group_3 that has no user - study_37 = VariantStudy(id=37, groups=[group_3]) - study_38 = RawStudy(id=38, groups=[group_3]) + study_37 = VariantStudy(id=37, name="study-X37", groups=[group_3]) + study_38 = RawStudy(id=38, name="study-X38", groups=[group_3]) db_session.add_all([user_1, user_2, user_3, group_1, group_2, group_3]) db_session.add_all( @@ -987,12 +998,13 @@ def test_get_all__non_admin_permissions_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) if expected_ids is not None: - assert {s.id for s in all_studies} == expected_ids + assert sorted((s.id for s in all_studies), key=int) == expected_ids # test pagination with DBStatementRecorder(db_session.bind) as db_recorder: all_studies = repository.get_all(study_filter=study_filter, pagination=StudyPagination(page_nb=1, page_size=2)) assert len(all_studies) == max(0, min(len(expected_ids) - 2, 2)) + assert sorted((s.id for s in all_studies), key=int) == 
expected_ids[2:4] assert len(db_recorder.sql_statements) == 1, str(db_recorder) @@ -1000,22 +1012,22 @@ def test_get_all__non_admin_permissions_filter( "is_admin, study_groups, expected_ids", [ # fmt: off - (True, [], {str(e) for e in range(1, 39)}), - (True, ["101"], {"1", "3", "4", "7", "8", "9", "17", "19", "20", "23", "24", "25", "33"}), - (True, ["102"], {"2", "5", "6", "7", "8", "9", "18", "21", "22", "23", "24", "25", "34"}), - (True, ["103"], {"37", "38"}), - (True, ["101", "102"], {"1", "2", "3", "4", "5", "6", "7", "8", "9", "17", "18", "19", - "20", "21", "22", "23", "24", "25", "33", "34"}), - (True, ["101", "103"], {"1", "3", "4", "7", "8", "9", "17", "19", "20", "23", "24", "25", "33", "37", "38"}), - (True, ["101", "102", "103"], {"1", "2", "3", "4", "5", "6", "7", "8", "9", "17", "18", - "19", "20", "21", "22", "23", "24", "25", "33", "34", "37", "38"}), - (False, [], set()), - (False, ["101"], set()), - (False, ["102"], set()), - (False, ["103"], set()), - (False, ["101", "102"], set()), - (False, ["101", "103"], set()), - (False, ["101", "102", "103"], set()), + (True, [], [str(e) for e in range(1, 39)]), + (True, ["101"], ["1", "3", "4", "7", "8", "9", "17", "19", "20", "23", "24", "25", "33"]), + (True, ["102"], ["2", "5", "6", "7", "8", "9", "18", "21", "22", "23", "24", "25", "34"]), + (True, ["103"], ["37", "38"]), + (True, ["101", "102"], ["1", "2", "3", "4", "5", "6", "7", "8", "9", "17", "18", "19", + "20", "21", "22", "23", "24", "25", "33", "34"]), + (True, ["101", "103"], ["1", "3", "4", "7", "8", "9", "17", "19", "20", "23", "24", "25", "33", "37", "38"]), + (True, ["101", "102", "103"], ["1", "2", "3", "4", "5", "6", "7", "8", "9", "17", "18", + "19", "20", "21", "22", "23", "24", "25", "33", "34", "37", "38"]), + (False, [], []), + (False, ["101"], []), + (False, ["102"], []), + (False, ["103"], []), + (False, ["101", "102"], []), + (False, ["101", "103"], []), + (False, ["101", "102", "103"], []), # fmt: on ], ) @@ -1023,7 +1035,7 @@ def test_get_all__admin_permissions_filter( db_session: Session, is_admin: bool, study_groups: t.Sequence[str], - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) @@ -1036,54 +1048,54 @@ def test_get_all__admin_permissions_filter( group_3 = Group(id=103, name="group3") # create variant studies for user_1 and user_2 that are part of some groups - study_1 = VariantStudy(id=1, owner=user_1, groups=[group_1]) - study_2 = VariantStudy(id=2, owner=user_1, groups=[group_2]) - study_3 = VariantStudy(id=3, groups=[group_1]) - study_4 = VariantStudy(id=4, owner=user_2, groups=[group_1]) - study_5 = VariantStudy(id=5, owner=user_2, groups=[group_2]) - study_6 = VariantStudy(id=6, groups=[group_2]) - study_7 = VariantStudy(id=7, owner=user_1, groups=[group_1, group_2]) - study_8 = VariantStudy(id=8, owner=user_2, groups=[group_1, group_2]) - study_9 = VariantStudy(id=9, groups=[group_1, group_2]) - study_10 = VariantStudy(id=10, owner=user_1) - study_11 = VariantStudy(id=11, owner=user_2) + study_1 = VariantStudy(id=1, name="study-1", owner=user_1, groups=[group_1]) + study_2 = VariantStudy(id=2, name="study-2", owner=user_1, groups=[group_2]) + study_3 = VariantStudy(id=3, name="study-3", groups=[group_1]) + study_4 = VariantStudy(id=4, name="study-4", owner=user_2, groups=[group_1]) + study_5 = VariantStudy(id=5, name="study-5", owner=user_2, groups=[group_2]) + study_6 = VariantStudy(id=6, name="study-6", 
groups=[group_2]) + study_7 = VariantStudy(id=7, name="study-7", owner=user_1, groups=[group_1, group_2]) + study_8 = VariantStudy(id=8, name="study-8", owner=user_2, groups=[group_1, group_2]) + study_9 = VariantStudy(id=9, name="study-9", groups=[group_1, group_2]) + study_10 = VariantStudy(id=10, name="study-X10", owner=user_1) + study_11 = VariantStudy(id=11, name="study-X11", owner=user_2) # create variant studies with neither owner nor groups - study_12 = VariantStudy(id=12) - study_13 = VariantStudy(id=13, public_mode=PublicMode.READ) - study_14 = VariantStudy(id=14, public_mode=PublicMode.EDIT) - study_15 = VariantStudy(id=15, public_mode=PublicMode.EXECUTE) - study_16 = VariantStudy(id=16, public_mode=PublicMode.FULL) + study_12 = VariantStudy(id=12, name="study-X12") + study_13 = VariantStudy(id=13, name="study-X13", public_mode=PublicMode.READ) + study_14 = VariantStudy(id=14, name="study-X14", public_mode=PublicMode.EDIT) + study_15 = VariantStudy(id=15, name="study-X15", public_mode=PublicMode.EXECUTE) + study_16 = VariantStudy(id=16, name="study-X16", public_mode=PublicMode.FULL) # create raw studies for user_1 and user_2 that are part of some groups - study_17 = RawStudy(id=17, owner=user_1, groups=[group_1]) - study_18 = RawStudy(id=18, owner=user_1, groups=[group_2]) - study_19 = RawStudy(id=19, groups=[group_1]) - study_20 = RawStudy(id=20, owner=user_2, groups=[group_1]) - study_21 = RawStudy(id=21, owner=user_2, groups=[group_2]) - study_22 = RawStudy(id=22, groups=[group_2]) - study_23 = RawStudy(id=23, owner=user_1, groups=[group_1, group_2]) - study_24 = RawStudy(id=24, owner=user_2, groups=[group_1, group_2]) - study_25 = RawStudy(id=25, groups=[group_1, group_2]) - study_26 = RawStudy(id=26, owner=user_1) - study_27 = RawStudy(id=27, owner=user_2) + study_17 = RawStudy(id=17, name="study-X17", owner=user_1, groups=[group_1]) + study_18 = RawStudy(id=18, name="study-X18", owner=user_1, groups=[group_2]) + study_19 = RawStudy(id=19, name="study-X19", groups=[group_1]) + study_20 = RawStudy(id=20, name="study-X20", owner=user_2, groups=[group_1]) + study_21 = RawStudy(id=21, name="study-X21", owner=user_2, groups=[group_2]) + study_22 = RawStudy(id=22, name="study-X22", groups=[group_2]) + study_23 = RawStudy(id=23, name="study-X23", owner=user_1, groups=[group_1, group_2]) + study_24 = RawStudy(id=24, name="study-X24", owner=user_2, groups=[group_1, group_2]) + study_25 = RawStudy(id=25, name="study-X25", groups=[group_1, group_2]) + study_26 = RawStudy(id=26, name="study-X26", owner=user_1) + study_27 = RawStudy(id=27, name="study-X27", owner=user_2) # create raw studies with neither owner nor groups - study_28 = RawStudy(id=28) - study_29 = RawStudy(id=29, public_mode=PublicMode.READ) - study_30 = RawStudy(id=30, public_mode=PublicMode.EDIT) - study_31 = RawStudy(id=31, public_mode=PublicMode.EXECUTE) - study_32 = RawStudy(id=32, public_mode=PublicMode.FULL) + study_28 = RawStudy(id=28, name="study-X28") + study_29 = RawStudy(id=29, name="study-X29", public_mode=PublicMode.READ) + study_30 = RawStudy(id=30, name="study-X30", public_mode=PublicMode.EDIT) + study_31 = RawStudy(id=31, name="study-X31", public_mode=PublicMode.EXECUTE) + study_32 = RawStudy(id=32, name="study-X32", public_mode=PublicMode.FULL) # create studies for user_3 that is not part of any group - study_33 = VariantStudy(id=33, owner=user_3, groups=[group_1]) - study_34 = RawStudy(id=34, owner=user_3, groups=[group_2]) - study_35 = VariantStudy(id=35, owner=user_3) - study_36 = RawStudy(id=36, 
owner=user_3) + study_33 = VariantStudy(id=33, name="study-X33", owner=user_3, groups=[group_1]) + study_34 = RawStudy(id=34, name="study-X34", owner=user_3, groups=[group_2]) + study_35 = VariantStudy(id=35, name="study-X35", owner=user_3) + study_36 = RawStudy(id=36, name="study-X36", owner=user_3) # create studies for group_3 that has no user - study_37 = VariantStudy(id=37, groups=[group_3]) - study_38 = RawStudy(id=38, groups=[group_3]) + study_37 = VariantStudy(id=37, name="study-X37", groups=[group_3]) + study_38 = RawStudy(id=38, name="study-X38", groups=[group_3]) db_session.add_all([user_1, user_2, user_3, group_1, group_2, group_3]) db_session.add_all( @@ -1118,12 +1130,13 @@ def test_get_all__admin_permissions_filter( assert len(db_recorder.sql_statements) == 1, str(db_recorder) if expected_ids is not None: - assert {s.id for s in all_studies} == expected_ids + assert sorted((s.id for s in all_studies), key=int) == expected_ids # test pagination with DBStatementRecorder(db_session.bind) as db_recorder: all_studies = repository.get_all(study_filter=study_filter, pagination=StudyPagination(page_nb=1, page_size=2)) assert len(all_studies) == max(0, min(len(expected_ids) - 2, 2)) + assert sorted((s.id for s in all_studies), key=int) == expected_ids[2:4] assert len(db_recorder.sql_statements) == 1, str(db_recorder) @@ -1134,7 +1147,7 @@ def test_update_tags( repository = StudyMetadataRepository(cache_service=icache, session=db_session) study_id = 1 - study = RawStudy(id=study_id, tags=[]) + study = RawStudy(id=study_id, name=f"study-{study_id}", tags=[]) db_session.add(study) db_session.commit() @@ -1163,26 +1176,26 @@ def test_update_tags( @pytest.mark.parametrize( "managed, study_ids, exists, expected_ids", [ - (None, [], False, {"5", "6"}), - (None, [], True, {"1", "2", "3", "4", "7", "8"}), - (None, [], None, {"1", "2", "3", "4", "5", "6", "7", "8"}), - (None, [1, 3, 5, 7], False, {"5"}), - (None, [1, 3, 5, 7], True, {"1", "3", "7"}), - (None, [1, 3, 5, 7], None, {"1", "3", "5", "7"}), - (True, [], False, {"5"}), - (True, [], True, {"1", "2", "3", "4", "8"}), - (True, [], None, {"1", "2", "3", "4", "5", "8"}), - (True, [1, 3, 5, 7], False, {"5"}), - (True, [1, 3, 5, 7], True, {"1", "3"}), - (True, [1, 3, 5, 7], None, {"1", "3", "5"}), - (True, [2, 4, 6, 8], True, {"2", "4", "8"}), - (True, [2, 4, 6, 8], None, {"2", "4", "8"}), - (False, [], False, {"6"}), - (False, [], True, {"7"}), - (False, [], None, {"6", "7"}), - (False, [1, 3, 5, 7], False, set()), - (False, [1, 3, 5, 7], True, {"7"}), - (False, [1, 3, 5, 7], None, {"7"}), + (None, [], False, ["5", "6"]), + (None, [], True, ["1", "2", "3", "4", "7", "8"]), + (None, [], None, ["1", "2", "3", "4", "5", "6", "7", "8"]), + (None, [1, 3, 5, 7], False, ["5"]), + (None, [1, 3, 5, 7], True, ["1", "3", "7"]), + (None, [1, 3, 5, 7], None, ["1", "3", "5", "7"]), + (True, [], False, ["5"]), + (True, [], True, ["1", "2", "3", "4", "8"]), + (True, [], None, ["1", "2", "3", "4", "5", "8"]), + (True, [1, 3, 5, 7], False, ["5"]), + (True, [1, 3, 5, 7], True, ["1", "3"]), + (True, [1, 3, 5, 7], None, ["1", "3", "5"]), + (True, [2, 4, 6, 8], True, ["2", "4", "8"]), + (True, [2, 4, 6, 8], None, ["2", "4", "8"]), + (False, [], False, ["6"]), + (False, [], True, ["7"]), + (False, [], None, ["6", "7"]), + (False, [1, 3, 5, 7], False, []), + (False, [1, 3, 5, 7], True, ["7"]), + (False, [1, 3, 5, 7], None, ["7"]), ], ) def test_count_studies__general_case( @@ -1190,20 +1203,20 @@ def test_count_studies__general_case( managed: t.Union[bool, None], 
study_ids: t.Sequence[str], exists: t.Union[bool, None], - expected_ids: t.Set[str], + expected_ids: t.List[str], ) -> None: test_workspace = "test-repository" icache: Mock = Mock(spec=ICache) repository = StudyMetadataRepository(cache_service=icache, session=db_session) - study_1 = VariantStudy(id=1) - study_2 = VariantStudy(id=2) - study_3 = VariantStudy(id=3) - study_4 = VariantStudy(id=4) - study_5 = RawStudy(id=5, missing=datetime.datetime.now(), workspace=DEFAULT_WORKSPACE_NAME) - study_6 = RawStudy(id=6, missing=datetime.datetime.now(), workspace=test_workspace) - study_7 = RawStudy(id=7, missing=None, workspace=test_workspace) - study_8 = RawStudy(id=8, missing=None, workspace=DEFAULT_WORKSPACE_NAME) + study_1 = VariantStudy(id=1, name="study-1") + study_2 = VariantStudy(id=2, name="study-2") + study_3 = VariantStudy(id=3, name="study-3") + study_4 = VariantStudy(id=4, name="study-4") + study_5 = RawStudy(id=5, name="study-5", missing=datetime.datetime.now(), workspace=DEFAULT_WORKSPACE_NAME) + study_6 = RawStudy(id=6, name="study-6", missing=datetime.datetime.now(), workspace=test_workspace) + study_7 = RawStudy(id=7, name="study-7", missing=None, workspace=test_workspace) + study_8 = RawStudy(id=8, name="study-8", missing=None, workspace=DEFAULT_WORKSPACE_NAME) db_session.add_all([study_1, study_2, study_3, study_4, study_5, study_6, study_7, study_8]) db_session.commit() From 7472ea6067512006837db05e3e00435e40c162b0 Mon Sep 17 00:00:00 2001 From: belthlemar Date: Fri, 5 Apr 2024 11:02:47 +0200 Subject: [PATCH 016/147] feat(thermal): add new matrices for v8.7 --- .../business/areas/thermal_management.py | 40 +++++++- .../thermal/series/area/thermal/thermal.py | 14 +++ .../storage/study_upgrader/upgrader_870.py | 13 ++- .../model/command/create_cluster.py | 7 ++ antarest/study/web/study_data_blueprint.py | 31 ++++++ .../study_data_blueprint/test_thermal.py | 89 +++++++++++++++++- .../business/test_study_version_upgrader.py | 3 + .../little_study_860.expected.zip | Bin 124012 -> 128048 bytes .../little_study_860.expected.zip | Bin 126978 -> 128576 bytes 9 files changed, 191 insertions(+), 6 deletions(-) diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index 9fea2c568d..8444065a6a 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -1,9 +1,15 @@ import json import typing as t +from pathlib import Path from pydantic import validator -from antarest.core.exceptions import DuplicateThermalCluster, ThermalClusterConfigNotFound, ThermalClusterNotFound +from antarest.core.exceptions import ( + DuplicateThermalCluster, + IncoherenceBetweenMatricesLength, + ThermalClusterConfigNotFound, + ThermalClusterNotFound, +) from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands from antarest.study.model import Study from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id @@ -338,6 +344,11 @@ def duplicate_cluster( f"input/thermal/prepro/{area_id}/{lower_new_id}/modulation", f"input/thermal/prepro/{area_id}/{lower_new_id}/data", ] + if int(study.version) >= 870: + source_paths.append(f"input/thermal/series/{area_id}/{lower_source_id}/CO2Cost") + source_paths.append(f"input/thermal/series/{area_id}/{lower_source_id}/fuelCost") + new_paths.append(f"input/thermal/series/{area_id}/{lower_new_id}/CO2Cost") + 
new_paths.append(f"input/thermal/series/{area_id}/{lower_new_id}/fuelCost") # Prepare and execute commands commands: t.List[t.Union[CreateCluster, ReplaceMatrix]] = [create_cluster_cmd] @@ -351,3 +362,30 @@ def duplicate_cluster( execute_or_add_commands(study, self._get_file_study(study), commands, self.storage_service) return ThermalClusterOutput(**new_config.dict(by_alias=False)) + + def validate_series(self, study: Study, area_id: str, cluster_id: str) -> bool: + cluster_id_lowered = cluster_id.lower() + matrices_path = [f"input/thermal/series/{area_id}/{cluster_id_lowered}/series"] + if int(study.version) >= 870: + matrices_path.append(f"input/thermal/series/{area_id}/{cluster_id_lowered}/CO2Cost") + matrices_path.append(f"input/thermal/series/{area_id}/{cluster_id_lowered}/fuelCost") + + matrices_width = [] + for matrix_path in matrices_path: + matrix = self.storage_service.get_storage(study).get(study, matrix_path) + matrix_data = matrix["data"] + matrix_length = len(matrix_data) + if matrix_length > 0 and matrix_length != 8760: + raise IncoherenceBetweenMatricesLength( + f"The matrix {Path(matrix_path).name} should have 8760 rows, currently: {matrix_length}" + ) + matrices_width.append(len(matrix_data[0])) + comparison_set = set(matrices_width) + comparison_set.discard(0) + comparison_set.discard(1) + if len(comparison_set) > 1: + raise IncoherenceBetweenMatricesLength( + f"Matrix columns mismatch in thermal cluster '{cluster_id}' series. Columns size are {matrices_width}" + ) + + return True diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/series/area/thermal/thermal.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/series/area/thermal/thermal.py index a93c4378cb..c11083a882 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/series/area/thermal/thermal.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/series/area/thermal/thermal.py @@ -2,6 +2,7 @@ from antarest.study.storage.rawstudy.model.filesystem.inode import TREE from antarest.study.storage.rawstudy.model.filesystem.matrix.constants import default_scenario_hourly from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix +from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixFrequency class InputThermalSeriesAreaThermal(FolderNode): @@ -13,4 +14,17 @@ def build(self) -> TREE: default_empty=default_scenario_hourly, ), } + if self.config.version >= 870: + children["CO2Cost"] = InputSeriesMatrix( + self.context, + self.config.next_file("CO2Cost.txt"), + freq=MatrixFrequency.HOURLY, + default_empty=default_scenario_hourly, + ) + children["fuelCost"] = InputSeriesMatrix( + self.context, + self.config.next_file("fuelCost.txt"), + freq=MatrixFrequency.HOURLY, + default_empty=default_scenario_hourly, + ) return children diff --git a/antarest/study/storage/study_upgrader/upgrader_870.py b/antarest/study/storage/study_upgrader/upgrader_870.py index a2afc4bd1f..0635215896 100644 --- a/antarest/study/storage/study_upgrader/upgrader_870.py +++ b/antarest/study/storage/study_upgrader/upgrader_870.py @@ -50,10 +50,15 @@ def upgrade_870(study_path: Path) -> None: # Add properties for thermal clusters in .ini file ini_files = study_path.glob("input/thermal/clusters/*/list.ini") + thermal_path = study_path / Path("input/thermal/series") for ini_file_path in ini_files: data = IniReader().read(ini_file_path) - for section in data: - data[section]["costgeneration"] 
= "SetManually" - data[section]["efficiency"] = 100 - data[section]["variableomcost"] = 0 + area_id = ini_file_path.parent.name + for cluster in data.keys(): + new_thermal_path = thermal_path / area_id / cluster.lower() + (new_thermal_path / "CO2Cost.txt").touch() + (new_thermal_path / "fuelCost.txt").touch() + data[cluster]["costgeneration"] = "SetManually" + data[cluster]["efficiency"] = 100 + data[cluster]["variableomcost"] = 0 IniWriter().write(data, ini_file_path) diff --git a/antarest/study/storage/variantstudy/model/command/create_cluster.py b/antarest/study/storage/variantstudy/model/command/create_cluster.py index f9edfba949..80b7fbe580 100644 --- a/antarest/study/storage/variantstudy/model/command/create_cluster.py +++ b/antarest/study/storage/variantstudy/model/command/create_cluster.py @@ -135,6 +135,13 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: } } } + if study_data.config.version >= 870: + new_cluster_data["input"]["thermal"]["series"][self.area_id][series_id][ + "CO2Cost" + ] = self.command_context.generator_matrix_constants.get_null_matrix() + new_cluster_data["input"]["thermal"]["series"][self.area_id][series_id][ + "fuelCost" + ] = self.command_context.generator_matrix_constants.get_null_matrix() study_data.tree.save(new_cluster_data) return output diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index 6d7dff831e..53935986b0 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -1894,6 +1894,37 @@ def redirect_update_thermal_cluster( # We cannot perform redirection, because we have a PUT, where a PATCH is required. return update_thermal_cluster(uuid, area_id, cluster_id, cluster_data, current_user=current_user) + @bp.get( + path="/studies/{uuid}/areas/{area_id}/clusters/thermal/{cluster_id}/validate", + tags=[APITag.study_data], + summary="Validates the thermal cluster series", + response_model=None, + ) + def validate_cluster_series( + uuid: str, + area_id: str, + cluster_id: str, + current_user: JWTUser = Depends(auth.get_current_user), + ) -> bool: + """ + Validate the consistency of all time series for the given thermal cluster. + + Args: + - `uuid`: The UUID of the study. + - `area_id`: the area ID. + - `cluster_id`: the ID of the thermal cluster. + + Permissions: + - User must have READ permission on the study. 
+ """ + logger.info( + f"Validating thermal series values for study {uuid} and cluster {cluster_id}", + extra={"user": current_user.id}, + ) + params = RequestParameters(user=current_user) + study = study_service.check_study_access(uuid, StudyPermissionType.READ, params) + return study_service.thermal_manager.validate_series(study, area_id, cluster_id) + @bp.delete( path="/studies/{uuid}/areas/{area_id}/clusters/thermal", tags=[APITag.study_data], diff --git a/tests/integration/study_data_blueprint/test_thermal.py b/tests/integration/study_data_blueprint/test_thermal.py index 3297a1fba8..88aa5b7e5f 100644 --- a/tests/integration/study_data_blueprint/test_thermal.py +++ b/tests/integration/study_data_blueprint/test_thermal.py @@ -27,12 +27,13 @@ * delete a cluster (or several clusters) * validate the consistency of the matrices (and properties) """ - +import io import json import re import typing as t import numpy as np +import pandas as pd import pytest from starlette.testclient import TestClient @@ -265,6 +266,21 @@ ] +def _upload_matrix( + client: TestClient, user_access_token: str, study_id: str, matrix_path: str, df: pd.DataFrame +) -> None: + tsv = io.BytesIO() + df.to_csv(tsv, sep="\t", index=False, header=False) + tsv.seek(0) + res = client.put( + f"/v1/studies/{study_id}/raw", + params={"path": matrix_path}, + headers={"Authorization": f"Bearer {user_access_token}"}, + files={"file": tsv}, + ) + res.raise_for_status() + + @pytest.mark.unit_test class TestThermal: @pytest.mark.parametrize( @@ -527,6 +543,77 @@ def test_lifecycle( assert res.status_code == 200 assert res.json()["data"] == matrix + # ============================= + # THERMAL CLUSTER VALIDATION + # ============================= + + # Everything is fine at the beginning + res = client.get( + f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate", + headers={"Authorization": f"Bearer {user_access_token}"}, + ) + assert res.status_code == 200 + assert res.json() is True + + # Modifies series matrix with wrong length (!= 8760) + _upload_matrix( + client, + user_access_token, + study_id, + f"input/thermal/series/{area_id}/{fr_gas_conventional_id.lower()}/series", + pd.DataFrame(np.random.randint(0, 10, size=(4, 1))), + ) + + # Validation should fail + res = client.get( + f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate", + headers={"Authorization": f"Bearer {user_access_token}"}, + ) + assert res.status_code == 422 + obj = res.json() + assert obj["exception"] == "IncoherenceBetweenMatricesLength" + assert obj["description"] == "The matrix series should have 8760 rows, currently: 4" + + # Update with the right length + _upload_matrix( + client, + user_access_token, + study_id, + f"input/thermal/series/{area_id}/{fr_gas_conventional_id.lower()}/series", + pd.DataFrame(np.random.randint(0, 10, size=(8760, 4))), + ) + + # Validation should succeed again + res = client.get( + f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate", + headers={"Authorization": f"Bearer {user_access_token}"}, + ) + assert res.status_code == 200 + assert res.json() is True + + if version >= 870: + # Adds a CO2Cost matrix with different columns size + _upload_matrix( + client, + user_access_token, + study_id, + f"input/thermal/series/{area_id}/{fr_gas_conventional_id.lower()}/CO2Cost", + pd.DataFrame(np.random.randint(0, 10, size=(8760, 3))), + ) + + # Validation should fail + res = client.get( + 
f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate", + headers={"Authorization": f"Bearer {user_access_token}"}, + ) + assert res.status_code == 422 + obj = res.json() + assert obj["exception"] == "IncoherenceBetweenMatricesLength" + assert ( + obj["description"] + == "Matrix columns mismatch in thermal cluster 'FR_Gas conventional' series. Columns size are [4, 3, 1]" + ) + # ============================= # THERMAL CLUSTER DELETION # ============================= diff --git a/tests/storage/business/test_study_version_upgrader.py b/tests/storage/business/test_study_version_upgrader.py index ef3ddf97c4..f0c0cee009 100644 --- a/tests/storage/business/test_study_version_upgrader.py +++ b/tests/storage/business/test_study_version_upgrader.py @@ -211,8 +211,11 @@ def assert_inputs_are_updated(tmp_path: Path, old_area_values: dict, old_binding # thermal cluster part for area in list_areas: reader = IniReader(DUPLICATE_KEYS) + thermal_series_path = tmp_path / "input" / "thermal" / "series" / area thermal_cluster_list = reader.read(tmp_path / "input" / "thermal" / "clusters" / area / "list.ini") for cluster in thermal_cluster_list: + assert (thermal_series_path / cluster.lower() / "fuelCost.txt").exists() + assert (thermal_series_path / cluster.lower() / "CO2Cost.txt").exists() assert thermal_cluster_list[cluster]["costgeneration"] == "SetManually" assert thermal_cluster_list[cluster]["efficiency"] == 100 assert thermal_cluster_list[cluster]["variableomcost"] == 0 diff --git a/tests/storage/study_upgrader/upgrade_870/empty_binding_constraints/little_study_860.expected.zip b/tests/storage/study_upgrader/upgrade_870/empty_binding_constraints/little_study_860.expected.zip index 508430ffc5b01313f9157d424d75df6dae64ad2d..11b6e564d44485ae041f46ff53cdb41fd95a1164 100644 GIT binary patch delta 18004 zcmbtb30TzC+MhF^>?5lK%m6b(xP%IBp_Un?_2r6|`+{VsCt=#ljxb(o`_b`&Dk){hsP-#h2Z$x>?_Q&VM=QKl2}8y7Wvua(?gjJLkM-d*|+l zK1aXyDedZ8R&1JH3LjkP=#1-&{mW*y^DXCpi^5%Z1mh>R`s03W0?JMX`0i2Kj@ct3 zd=0*aJve`?LH=0_zoT&XHsRQWLO8U>D;lSw;dUS20|VDtDVhS)hIo00g91SX4)L+# zI%MIZX+B3l?8NA`n<_!1KN7|EDwc6OUn_pc7$b=JrUd!;R{o{@l@1`5Y%t&*)fPF7 zppene2N`^3l8Kqk22%o(G??~e|0>ju>#p(20hhLX-+s*`Kv0yMNJ!A?KNSioSdFx? 
zBx+*Ovj<*;vyYA3T#aITE0N7#7+^r&kIgup#r`*>%iv)fQob7OJ@V<`bCICP$G7ln zuh4dqxIj_>PHNjiXDj@K<7I7~I=i;*FrX;9=HT=(pxHe|DV){Tc647s#5V=32%W4) z46g9)#6>>zoekpxSAX)-1aNFFjEhql7CgkyB98ga_}dbHVqT9zq>(a194T~~-1ST3 zDq$W3(Lu~dW2%2Y-n_!U04(m^P#>EE7FWv_@A}zD&<3Ps*h$Tt071>sfO1gtQrXW5 zg`lREtHGxIK7kq!^&IMmbM}XEr0r?3p~71G;ryg zUIxRhOWt}O;&(y^sxo-$+;AKnHWu6@wqLVo|NcHy>R;I_6S;dC`Eh|l$D`EQSiHk7 z{8cZnQTzRHcT)^`r3{7QCE;dO3v;z|i>^xE7V)@Be$sz@(LCcyL-x41cl->n-3YfNy|7LBC%oJlUNGY4BJ9SRLJt#MdN@e z^`{el$sR*uUqxYor(xY`fDiqRl_9RKJ-E^~*qbe5r$&WAZ%_?JSq;z5OPtjARMeZS zPmSpDRPMa|ZutxInQQNYnJe(2zj1Rqon7vw>WrB_KQmjSl=@DUvKT-q3{}BVmWn7l zJ@nb}UI*^uD#ydP*lrG_dhW{lbodC0PXJgb_IH>)=OqPVNODUFwd@fkeW*J3Y$*D4 z+33vY2@DYL^%4cCf!E~l>2KZehyR(dM>^v0IY+Q==+qLk=E-R2gJqTa;fp$T!e19} zV>-_}7l6KP5=~wjd`-a9c3R#_+m$ ze{4(+ZPxVx$-TG-Nl$L@1=ov@@4mViW<>pVKfP>kzyp#y-~oLF2RspsdeTV{JQpl7 z&xelp$JYm&xaV}?vJQz;U1huE$?5+1yR;CeHO@6B)eY&d4nnhLs`K#h=YAG^aAXKM zT8Y9{G%S5WV&G`L*~UwUKFYmH9nk{}-2QccmL23K!beSdyw9i4vGZrYmCyuDl>tXPSV-Q@Z?-K#DOOdx2VQjBqMEyr(`&=ZA6r3 zgy6U-9`fP9rZiy}1)OxyaKc&V9R+Mf8F15xNW3g9k~_E~tqbs3*Y%q`3qy!}S~*N_ zP&x3W^j343ppxFIG36lVcOlPs?Cpt#oM8?!r4DIHbQxF)IOn;D=14gFoG>(x8E z1%6O<->WB2&G`(k9$N%Qy@aTZ{>wmy%Bl0%U=lP!gQ2se(H@=NJw2OmT$X@x z`cXDRppT+mH$wq#^x=c!0@{I>e*q=*)1o3DcZ!0>Gex&5$V>4qP&< zg$ZK8KtoGem-5i+tb1qztYpfd$>Y8Q&$~s9EtO@eR*5Xb)yQ!p%Szw_;ZOI!h37ofR1~pX3@{ zN5zGik9$JHk?HXs=airs@vb?gRs5xT;@X25@h(ZlX&aI;yLm2Vd(1`uqn_mm%7x2z ztTkNZc*;eKd$Pvu3HWGc+sJvDbF(t%>D*7a4wl@!&g9zb!f_F|E{}j5UX{zwYONL% zE|bBMel9U*XU&-{_)qsFY+phTdi0g!+#>mTfZk21tq`5_b!E(4p1^CT&Mj>N9dkaU z$Ni;k42FgjAKf(xE#&#G_VI-#1Wvva)aB&|upe3{AF1y8`Qtf@?7pIHb}`JCwu?p9 z&F`a7@>)HL;M4U;UNj!C_yD*uYVGBs*FmMbt?<2-Odhvg%CkgmR8oLdxt`xgap65;NMJ!DDj z29N=#Q4ro*1J-b}&-pWC(7vr63TFu7>9I2aKGbUH27|J8$@#aNmlN0rqvkP!QJ)2Mak4OO z+@6U4DEGnV%KX(pXzoaJ_+wMKP5qlIJ5P|u$)%4(*Xn0Q;QDeixM%Q9m_tK~_R$aP zLB%6D77ID?+1G}E*!=V5lly_#Qh4OlW22GuCg;wHCWAYLxe)+)_D zzEk^ktCC-M`z@9u3qFZ0wSn*Ps_(I_p{)%uT}2-H9Z!~B755;n`d+4gyfxjve143e&$u?mzkZ z_{@O>o)+>C3<9m?g#&u`f~h?arZ&~uc%Z#TgYzLJ^dPT&seSUG4MCspej~@y4tA8^ z>hCJ?yhF*{?4d&)U?aNzg*z7_z-$8_^Pr>7A;4yYJ6n-ax@c!2KW2-PqZwp-i=V?3S{R(sVh(pjIq~(EZ@)y zux$8{Q(qt}`H;etf;!T)0*z8{wdubs#I^>-lApw7olgk1?i7C&f84B zTZR0Mm55ZYL1CKGI9U?Mv@w(P)c`tVlt5x395;&wlJ2WSzGTCBtB@VNO52?3+!Xz) z6XU#@er;8of>OPh-bO-cYS?5H`P*tR_6NDX+KB;)8I(tMtU>Kv%kvcRf;Aw1ih}se zP#jR*EbyTYMBpSJ{*I-9R4+I9G7u?VD^%zyW{7j6h8I)ZR;5V^ELM>ujL2xqfK8_wOYHB8554FgJ1Ok+*mG<9II zdUB!^oVXwZ#KJEH(7y}-fzed?0N40U#;;kERa?U)*?rO?c`Vj(DQcPB!zw^qy`_Md z?SIB8TAa$WC@PV#E;>ult0+=ijU_cxZcsGI*oPq5k3Lg@Cfs0?)}jw;7L|UE7fx~*aH_4dZW#`(7NSL2 zT$fUTrReIV(REYOrC)zTmC<4>s*DX+lh?F(YTKe$ghf#nR)s~9%Tct~NeSvwj>TMq z@oTR@cCU^PF@2h1EbhBA4=EboYq+diE@%i`pEDPIH2QiasJ^)8Ls2O4VXR9aQwi#* z8QB(Ahs8)Rw(mgOW1(y+LQ|9_l3-k>f`ZkGGzNG`Z!l;{XxmXS(0pAM*VjA=8@WDx z5|pAWsxXlZhKeVzh)xE)F+NI@@@a-@cnx~^1!(*sF#>-d&+inJ=ow&17YQI%U1>E`T0rNxfw6=YeeN+0;F*_>g(l`X257vn9kDIeJ3=JWz9~HbBZgd z5`sA*A=IZiwect{`tju0F33`9k(Ats(Tp&%>?mZgG6|r0n^!|74rA$IEwlh z$t$50M8+^PsXqjW?Gi%s0#8@~9iC&dnn!olMe6?&8K6A5tBv5l*Z)e5L@jJmEGq$c zO?_U{FP1SWAT@x0$snb2Ndc(-&lxRg5g*JIP{R~6xpV}A{E30WGzCdgkW0r<@#xLv zLe+&VPFN>>DgnrMJLRhawUtIWWGoXW1CGkqLyBoN%C{M!yC$2!M8ax>BgRQ8Y{!t5 z9R?H?YKUiL#MC-*n?uK-ark%=xJUTvgd#gKH;hc$1ABm~@g#c>;zg4#fOjWQKrHzM z&@N7@YHm!FP&4;Bq1Zk)nAB_oTfa{twtdJ_KGsj0HdxA|_X6erwV@@V>5eKIU z&O^`9$)_X+3n14|V=JVX#e~BdO4t4Q5hu0>rP%h}bF}#>;H5rrrbxvvaAQKAJAI*MWh5 zum-DkCdh;7yIE#<9+G~J7v|*A7$fj6Q-lG<;`}*}q`n1l>dhG*XWB~)bXr5$TGiR9d7v@oBqMgokG6QNSCFC;{{$aLM zjHqM;=KNNvk*Gx>%8*1eWas_oOGY#$APP#2pjt-79+QiQ6e}!*=mnAvP0cMuBoaCX zJMCPc?Cs{|W@pSza#!D>m>rpGCjUGMdx#$w81SOJIBkW83o2qELuu+cTtF#{D5$HZ 
zllt1n!*|T&U8jD@29E;mB0tnC_eNsfhHPr{d$5DN&oH>(T;cXl>M*hZ+Kt~VBTZ0G zf#VixuNdT8o6*Z@vyE?Wl^c241{qloN{%s9N3ykEC~+1LVdVNLm{_+~P-VPEOhF7g z1v)}j(#8b*TTzInyg~tiAz95+t0bglRhS-Bh~NwUIW5X^JJN zA$cRiF09^>Fs?NdvPby#$}n8N$Xl59?TeXODnjc-cr_~*F%#Qq$V7umRHW;*)|%50 z!Gcl=@$PA%(p1*NLfld&#UNHf&~|Bv8%qdj`T#|^?KzyW(@ci_9elaPkjZMk>uVfY zL8hKTk;<*TY?Y8Ru-V;OVZcEp&R0$B8gTtU;SneRiU9^={Uxc0)Dix$wdgEif%35Q|@Q;vM$e`F>d;!!a zZKL9`xS-tiuC=K`%<&D|i2V}kswq%VRH2KYYE30mMe5EWmlc>4Gyq~$6{&7O-O14N zP8({v1S?lc#vcK75han2&ujNdgjt!tUA1D;))gquu($^7kmBmLxuv*n?U3RUYXvl_ zDyk(_1?6U3KwQoXl3RUu3T~xcY%V^pT@)(4LCJ=&mg<$es7t&CN^ysg*nhV~qUKRw z(W8=hV>hukXpKmHFm(?VcfP`8I~BB-G+fr5LToi0#hRlv5A=#>jaQ(156XbHmz_?H zk6wn>*nL!cwB{L}FvWjkE$XwOMrYW5DrLoKmBQU3oClc7)lwrl^NDL7#=%u)5_|>b z`$Gpg6$vAceu31Q7J*-;cK$J9zlr2KGx3>U0J70= zngOlS8s-L=cmu3jU#A-FxL~i#zy-$b#>H~G@?0($V)FN;cGu@}fiJu9xmsq0oWFrw z`i!ahm;t(qw7}q`oo+WK9;d?b@X{2auC#K0Gvao$_AnBCOG{iLnS2YGtiK+I7uSqM znR9ijcoiqN--7jdzzG8$dBCFCR!Vk){e;_=2dS4&5c_S!pW+oQqi=(js5cDQa!|X< z(@@@`jqBeu;3bFnmq--JdtXAM|62wUa>wPxj3!jCadifK_fU-b8mEiou8Z6Eo)Ae%lGINRXNL%xJWk^lS_AmQXK{6{S~r5 xv#bOe=H<_SaZ$I7Ir$lLGv~Qz)qG9ubXMAsHf`w<;v=R^+TCrv^pir9NTU zg2Mc%G}Aa^ipAOJVINbUNP~fty@9NFwO<(i2eKT)fm_K8ht8q{2@bCB!cwOT^ zj3r>;4l8~zg@@p9-+@W)JQq9@Xh41LF5Fkl{urA&X54Edz8TYJ`~#tfV({^9J1rSr z>#MGr6o2g2%}+ii#2<;T`)W#<;5Sa4L#mRWNBPoJl~YBI*MCwz1&<6EsHhhO%*I~_ z3}uCt200l&XigMIJsFq2WX8?rcu~q4=x=C-@;AS>*Etv8FppB)4h?(}H~l$A(uyXo ztH`f5sE?j@dCh;4DsWnmpj{SJ3lYiMcuX)an-rbfuk)_Jo?`Gj02g&x`>;lNKlnMS zN!r(;KKRj)sIW^(AwHIA^$?%~LZpR?P0b(RPc;n*5&mF=>O)lOr{lyxXUmT-Gk zb=QT|f`1Rk(<~{9Se4}gWkkMpImJMdaZX4$UKelC9j3bg=^LaOJ1d#DABE$e;{VWx zjpp%*5v*9kVhNkOx4w!?mg2>`#Wc6y( zN#Sr3_!hFNt)~xkh|yVzr3G58AvKbm-GnT1(K^p%d}xxYAKU^%-jLd_H!zZHZPBW; z)=M8~#wUk`t9(kGR@v3`&*%N|?R)y;QNsn-Im2J`4;Ncq796I8;9aOcwmhh@#v=Lr zkr*+GWbDc_EiFho?&o{*_kENuhD*Ctu9y(~h}yEY-2by?QL{$jv#Y<9A^2idi@&M7j^j3W9ah(!FUyz+$NYxltaZA3iQMEM(H>SsTmWLsfg0t@~Y_@o;gIDbYw zPM#i!8>jom1Q<+JW3Cm@4?hNsT@J^9vCHw^=}YaVK4wrc7(&1Z`w!`lWM(9B^ql!= z^PibN=}|DU2n^7V%E-RroUv{jLHahxnH|V~1Q=Kd%Jk#afWNMRRGL94=+Tc#7w1Pg z@W~k-Q@5`;3zBhJp5CBMIsp0nn;&s~6AiYgn8f5P7-wD#&wC$N`B`+LA>dqJEZepI7xl1$gGIwbz zmG60(yR!=Q=k8~P-sf&nk^agE(R-V=A!OJ4C{%0=Y1-~87KTM<+Ma%@cl^JzJ#jp- zuAlYk(mDC_yzJAtCNrKrH(CrQnPhzfr^2#kU)(b{PDEHkz}=-JmelV5NFC*Rk$zJiOWODM5~+Uc@<^)=^wPTD z&GQ+4Rf}*Kc83{21>o+&w=-TR+%-Xgb3cZ<_6A zd8tEp`;iJS98vjFF;dkLaJ8W~%b*V%KkdL{o=wK-3zcIHmbzG#zbs0gIxHN3Cgo>;mKCs#V~xt9jWrXiodOUqT|acJRTm0^HK zk}r?Jo=uPB>{zY{g}H^YmI+2mmwlu)lC><7e12TtNbm{p1X@ zXGI8qq9}bSio||`REU*~hi@R}k&3($>fujVK1ukMswfp6EuM#e95L_yZrH?~2%`!s z-dHW1FF&ewrzJ%^SS~CZ5^QRw(FWo@L^F=tzJs{@D#CC`U7XzNbaZNR&3HjwsJMMe zXMbRpRH?Tr|pfal5`Z(3KL$5!;9+&;MjVhnKSCsRISpv=;L>&n9?dZ zqdpOzt2awShCYWny;|>x9-3BMQY_z-IpN+=V)RdTiqVHQc{3V= zcW)BbD=;i2wfk#udT)!cJ?tqPh}X0Ps2%Gc#N3_-YuiRjB?fQbtoF(#$;H-S{BWyT zs194g+-U~k>#fogiS~9v*t#)kODw+K>T$>H_zqH;v?Wa&%;0M%!W}8K9YXTHN74KN zjlQSAORM5~Nt*@NuVAsU7q*HJtJ`S|-n=zhYg}>)=bAPTOzn6{Tap)_*IR>X-)1Lv z50~WHj}U4h>@yfbmB!5NljL6X1l3N#G&y_`PEO3S`}L@*!;DwAM+pNU_pC3z)b11k zyr;`de!7Gl!k2Wo(&^u&cvZvai&u37lGH0GfE>Pz#7gpw>RhWCXKiB(ww&&eYp@Wm z+IFWBCVl?qk~aF9s;-VC-WOMQN)MsZI5L&%ojS%J>e3kB)74+dAQ?w?M=BE!MqS!< zYAZ$$ABHD%Cn@0K?)#x<(nQw4D!no_ZJ%ooL3q%1<-NIc`gRvmZ8(3M`Ssx_+_*!y zj(oPGKgY>)APe z=g%gUzwy83o|;7lRHDAJZSrs>S|D{(*}st0UY0-qPeo}@JzZMjvA+>>6)I90Tor8p zaT>u@C|*7`ke(_um*83yOD0ys_ijdo+&GLfNq03I!D(=X@F9a6ke!^pfC#sv!h&B$OfqZZf>>W+Ri?u?Np zDilGE)k@DoZXp}FRSQN6IF9$36iR~oU?Ifx;p!?zMTjBVhvO5+*`)w+V zVwXVWQ^>)U$YL5CV8FKnl!plGBmraxgCzp=05JeU-&xvvg1rv_5ZIC&A^idjMYTaiRzN-&}6 zlwm9ixkgZwX|*Kkrc+o_luNM~i-SoHkPn_`IVJ;#xjy8|Mihvcm_zl5OC>Um(H3gJ 
zfzv{DUi(=*ly`9%Uy$XL6^6t2tp@n5aM_HSa#6)$HkL+EIVWkqz*k&0(a>55 zVg%RmPG(z8!*D#N9zwg~B%|t)LKLvj_~gQL{VTEW-~Bpm?Hc@v~7<$YuN_g{!KV9p=#VRk{Hm8)nlu z%-jr1KuU%I-*_NSMd@&V4ED)nF!^D#aP<-Jv)L3^&b|~%A$6_D;(BL{!BAXMwqSnx zoYMSRp093PbX^yWhH)%GTYx@yQ?3kJxebsBV@duN$X7(93h&$kcnibr$}1d?jSY0l zRuHiKks7e$#u9$J;rNifP_Q{1B75IO3Xdu12epCzO-Vm@f`gqPI9}EU`cuYn({W|$ zeJ+JXetaB3Z}$@UKv?A8hHYGal@mO($X9Rye7oEu@=KC#-tJx^FMotNz=I}vME(Vb z$cqOB3Yz<0+yR;8iE zO(db+nw%x5O&;yfvUc2!+iWulL z!H-?ww<4a*Q4BjDAo#PZ!Zn#1>F4H=?)Bk3l8S+$^bayRP2NZ(ZjT!6st98gaL;=DbaRxe-|5=_xHz7km_BqT$F`)Z|{Cnn2h%|=VdOSx8{5+>Grzj z%$~}F&{=anVUXU;95an%?4j$4cz#j#Beq(ucgwh$0PV!|oz9HttvRzeM0#CwE=u}3 zYtDpRYFZt+jnL)BbTF5Wi)IsHv*r&oXeD48iVU<6G|Q@95|a5WL4d8Kzi|P)xRAei zaGb3+)AP84(oT;H5!*x99`Lm!q_f@_$L2Ew-WHq>@=4qK`U_6@Ok(-KqabWS>GvlJ zIZ49@ngvB#kcQ0SxcToQPA`h__ZGOPgBP-kc{RBOk)9`irEmVr~T+xmQ7`qL4ee z_M!BX04|Vq5LWAp98+;&JoK4GTuY>IWy>z(yKZ>DZsZoDtC;D_T&_0s#T+LpQcQLb zPFU!^utPz8)k(Y8v&5d!q{v*ijdxpd7Mw#+!3@NkO)TqPD-V3QoxZbu4g953| z;RZanU1?Yra{+IAFHdbQH8N5-46y+zWnVk+?Z<_~5N$Qn_j3(6bYUb}@ehw9!>YqD z`$v{gTzRwQwP4VgQVR7|^J4-&t&HQRMgELEn8cTqyXV22;*!}saP`}1JGtJ<1PgcCZV4qKN@XF zklUQ{Zghl7pIXLUf;A@XQ2Diyai7BZ?!Tn{IEqw}Xxt$iu8>!l34E}|rM#c8f&OU) zM?@ZtE5d`g%Xoh!huvfvYa&x~r2&`JSs`rs-OWa(eFpN^E6I!SZ&h_uLe;N1$EwKr z&rpI|JMkoFCsk9#MrNGUj-8pa$dpkdX(v&%alInygPR2wjt6i29L&Yl$mV>!P;7kF zUsXf$PwTGw(D5nC{rV!4bK;QIGR45%0o~>@-d9VlILQ*qI*Gg+m-*lekohNng#G3pKKW0Xdp zW%1J+LKY(LCSmpPC1_RD+ zi6pnq2`^>r*s!pHw4aB^EH9^LpzbWjqfBO~GEyrqK$(WFW}4*xE{HRN8a3@*ZNSG{ zY%1nXsUW4LET)(>Oa{B@JBCxkrWYhcB=5NPp05E8Xr#Far&8wfukh{h7MT&(dOOcx zJHBtT@;YT>-tJ#Z743NEX65Hvyl5u?so5GSvUb$0wB*9?j4Y;vbyUO_F_>>ws!Jl< zk?2ZgdY(&>#lZh7MWhUy%k-s$T~%X1_Js2~-Lsy#z+Y|Qe3mwZ^$fR*{2fo8ng5xg z{I_Ak*Hhl4M(sGRO?AAMo$g||16KjR=U(!~y!&o2;P`gEtFe;xLMG$DQ{RrzCjB{A zCwUiZ7YJZ6AJ7rjy@Z$Ju_L?U5QDI&|Nx{PWnR5z+^MZUwWF3CFi7dT@81F^b zB+_vSw)*=vlFOHn%vCEz4OKg;-$*(xBb}d(DjJkVwHSY7g3ODmU`}IpGd1BLW!I1o z8FfWr_4s(eNV|;dM46T=@RM?lk$|@ZBO)~@7%38D6n?6d`SXiQ3MGrwvrM?Lu8X|^mAu_T?uLJB7TiS?%4;QSZy@Cd zN_>Mo+DeYy6gOCPn7~E4jIJ$ICPirD@4=wy`9+ABZ^COljm2FF8;SWBVDD5QGPqLs zr4z4mIbeOCdKb!O5m&<>l HuTK9DcfReq diff --git a/tests/storage/study_upgrader/upgrade_870/nominal_case/little_study_860.expected.zip b/tests/storage/study_upgrader/upgrade_870/nominal_case/little_study_860.expected.zip index e8e657ae9819a8c23051efbef4c69b82d5304191..bb83d7774579e14cb7323c200b3408052271027f 100644 GIT binary patch delta 10776 zcmaJ{30PIf^?!383c<$%1XLDT1XKhSx46eGiT}i+qM`;V0S$^vfCw73S~W4LO++6| zWK{XOzocSDjbZTbPob5 zQylGioIf;O^RyvY?)2IQzhpmGul%5XVaFbCGOz@Nc=eWku+_=)_^LmiZR>{5dK*gYct%y=_c{H3R#fnx3c3c2zx^svi1GwOrnvw_}*tvpp4Z z%-o6QD1QcKPO(iDCbIvjiPK5iVbl|^h!$1pE=;3wTXY0TKaBj`8qAP5ntOw#tb3~s z!p!PG(s~qmV0&yI)uOC|bK=5qZfsA8XZ!xZrmKTLv*Pr)7rLw@kj+PtiSen`jCEsN z44F|Wc$$8kR_=u_j(eE{$)}O{yJ>(yS1x%edD@mFuLXG&CmA|~EnYiA&7PgqPxXy6 zYXq)K61{+G#?uWYl-zDbfwCw@*?=Xsz;&fVbZjz7LX*Q9Ew z1d`tPlSyZhu4fH?JwZdx!LSlPoD3-yN%eCM@s zDj89!%YkiY>c30=aC|B42hz}l0`!%pOKG0s3`ktd=%3*F^aKT?a@rlgw$$d&_YT(Q z`mtt6aCcmon(J{* z_KU8GrfOnAbZZJrzyx?`&sal_)gnFcOocyolW037*syO_7|2D=O{eaKsmwUDUKf_} z`ek0>v-zgdiZJAx>NVz@t0f7I^29^gc?uemZpXFR!g-eK-z?X2{Q&=Uh5pciAFUj0 z${2n5@W!v^`H=meBExA%Glg3#z46A}o-Ro`l>3rPlJdg3WXJy%H2ax)C!%1eX?NDYG?3kyZb64l?7CI&r10AxciPFp=1 z79!`)^^?=<1SLK*^l*sFxRed*g3#EJb71 z^(Pn4%gvJZ4|Gyf;#wQR>Faueb%|?PhQe|7dS4~Za*?wm2)C^7p%xitsUsNMH^gw! 
zrkjo<-SG9zV*icVl8MaBy$geJ?p7Z%=#s2w$Ifr?%RBih_Ix)IEMSX*6;CYeMJ8WD zUZ5a#TcYTG#=~^)d|%gH)eWcbNK_ixYwhHDJ@ADck!q2>)%7lXQ7XEDdldTNS9eYY ze8`cr@N~LJHDS*JgNbOL_6PFM^GiTHN3C&{>nJi0*P zY4rxA#QPFgAd!RjrMsfO@ur4wRf&)oZ`{$)*Sr|h7^@bN8>br|U^oB38AuL(OZRy_ zpDbLgrQ)42YLWHnM&}FQEwiEJ#AqCL!m#@Q#{lw}$TJDprkX(#nhYR5wSo9}lfgvh z$d65dN`$r5_DY z!G9%V3lTI0d0L3-VeQyX^s#Xh{k3Bai-we956*NWDHY+G~0)iE|%{AX6L& z8WJ9>9B3w>QferPe1JSia1ojfYgB$LsVqW~E+7w!TtPxePB9wk1}TKxDMs*q2O7s< zG|7P8jJu%!H|~nQ@uO#;Km5pq{_<`EI>e=UH;U8WA7r?u>eo}b;zu`s4|4em@+R+= zpm>wVA~0W;pk&>24z6UD5ce`C$+Kmq?Qt)&dylIODLshnBziCE=f>{VXb+NJO6`7s zFZEQuxhlW>d_Bmja!P-)7pbWNL1ieOEZm1u^sH@gaZUh_Fk_O)!zi45UW&%5b&55X z{A@ph83jvpJo){8G}k?#H<^FH)lxuhJ>Y`6O*f7M+R1Oq5Nvx;O@j_1IE!_6S*Q|! zDoVC;s$1UUxfq{AkhF3%!!&>(TkEMCx65TRScgQ(8H6T=2_r3$`dFcPYg#`GE{ckC zf8S*{bT*G*mWM{ksel9;VFKy|Ah(xphQo#vy31Hr09rzlD^LWURNKuotpG3CxPqKL zghEgYV;fo=-OV($0Dr3wBfE7#4D%zD$gHi8f%SCX2^Zi)ewU6^W!v> zMS`YZ5Q*~?6&Wyxi|ErEkT~}TWb=CtFbKwV1RwBQ=ds$=8-HCc{pm z81-tv4^h^1S_>zI;?^WX;7k=a$>4# z+1K22^}yG)<+M>*r2VveD|c{GDBd;OXeC<81VV9oo@;q(xq@^#&tRk0Z5bCJ$7_*F zWdvb0>Swlc6L8`!b3}=9|5?YmFnVN@x6a6l$IKO#J`1_=DyvudxC6gH!XEJ z2z6~Y?YtljE-?C&z5)cgww#NV9h_10g}!TRKVM05A7dE;9E zxhuz|VnVd1g4CL6*v8DZZd%QxU5*iM>X$$ve!rDn#Cww4IwK#Xx|}4!)H8y9F%PEK z26K}OxTu>mB}BUbG=1)@pCWV%Tf@YHaQbTDrA&RteUhzL3E`D=VjX$)JW`_$c0i1^ zTGw0fq;(N$dJ4<28~8Gw@V3}QIP>&huvHiWD0F(8w_BHQwBXG3YQm++TzS(*GUXZy zFily(_5&B_?&+}!$a3Baq=1W*^q=nl$+|@ZLr;Rd1Z!E_fF*aUK|kM`9x;SAootxT zf(A;rk+(0=ihlB*#PHF_wZQY>62*x6l_Hx4x2`yIw;LHfN%dthZURQ`?Y3V7kDTzb zSdB&N$>eLO7b(1Am>@Aqynb^>7l7r_6{O}m3|AbRaaXCO{T&QEh)|{;bjA#GbTgB1 zIi<+N$!+zwWz$be>_@$y$6lkb?-w&>u~+iga#yM`$H1o?{ag-ep7w)#894wSFHz$y zXoNdov!Dy|f0el8mQ9H4QJMSXF`vEujn%p8Ra*|&Af_Wfr}owCBeOq8F(xCzt)-=w z;(dL{rO%Bq<<|cX6|z2I2)|u5$DdY3x9~ zaQQwp1JpV6OCP$&xFXccobCmo_;S14GUR7RW~^gx;UrQuQ=K}xSvPdkBzt1EoT zeL27e^UA)W^gRc;EKWaUSHp6khu)<0#pR3+f2#nwpmPJeW<3OAeafQs_q7PZ5#JLe zhNzF$nW(UKR)w(kFI}yD0yJhVt|}9oi4v_kEJVLVz12A0;LR&Xz^wE`@Cn6Walla^ z4XiL)%wx1$kg6-949;-(lPa0iFtSum?G1>JI4vCaDf3rTeM9K-j&WIizON^j@36?9 zIPcI7zH-x8ZTfa4k7~4-M0L`ZehStG#7Tj3l?yAGLLK#Humv!XoT@>r`^3HY@ zYX^r2!w;&(&4b&-Nz>~YdMHk`2NoF^-z!M-kpaH%Qs~miDp9XOI2@BrR5ULOVb+R4}>z^sh)duxdLuVoN z-_Tkc+CA13C#>5irBuR zf8U^Ykn}NJ`%ut*<@XbVtj9?vo8V4t2`5G4O()cwYdmPY$w^TGBj5~@=OfdtCp26e ze+ul<|9TGe@`yBZS^#PMJMvS?<>oNR(=dVg-znawEtXZ8>$6tpKkLk!@ICF(S*OXu z?~&LSS>2Fh-y?hYU#WC{Rzbn?oMl4Z)!qp5PbzPX``Ch`P8v7Ql!p|y^b_*dLlmz! 
z0bEx2c^-0x+<0hwheVI)-n8-znea#o$TWD64<1n+9UmfcWjlh;PNbc&$2f@XF$ytk z%&*YLh`SDC$;&KQl09Taer}FFR%3tSBI6uE4pC0cQAAoE%e(5W$EdgczU98<>m?S; K^kPIu-2V><=?sbh delta 10094 zcmaJ{3s{v^);{|j6g?t04;dWfqJkl6XgY>wVw3q>O@bHH96?l41i5(0OFgMsmj7=8 zE_HhY!CNQ_C<>a|;FCEzs6n^Uioi*;@&b(Z{5>;{{d@1Vzwg`MImf@Adi>VAZhNh@ z*WP;_>u!6$-Qr!)ztc)OEqyNg5AvdeAR^LHm=fHHf6viq4fzg8u>}>L?$W8i;EgK$ zb%2-PX|jdDC~^Ql8oY1O`5Q^3mr&W#=_Pp4HrONwlkF1?&ZB-d{j4h`w-3SJBZKiTp^_OwXpTU7<+H?0y%NTJ9gM-k5svXA+Ep& z?%Ri>(xw-tC z+KT9ddg8$!ROtgPAx_mE*VdB}18IIKahlYIaWZTVm#IQ$Ga)9)AO0B84{a`tC{VR= z??<>29N_O42qlr>J}xVXfgYuHco-Rvx*8|+ftD&S2!FyKelM~^OOyk)_wu-uA+D;% zKx%JUaPDft-97qyuP~ZlM!LJ|$#r$5pDG>*`o^#BRPK()kOwLx)QKd9_yraY_R zoxKeC_QBJToH*piPTHTo>;%4Jq;TSpl1^}ZsROQzd)yF{yBgwV+95VE$Q?+S;B+m) z7-&m&($)w!k@>785}K(yr6BK9d1nt zfY=4y5ojI1M757*@;^4r%rg|A2`9}xLVBon^lT3W#Rd(nC%Tal@b5`4pv>sx<*pY0 zpxH=cq;bm{pTVhV2*9GxnSC9Vm9oMjDg<<&tb zqY-|aW+I$|pK2YDFu@MXrgy{b%loL(cCjgCPJbSoO7--6_y?}=PU>&;&I!^@r&Qlb z?IK$u;-^J@+)VRWA0Fy_<;-Z;@Y$g%JwU0AfLU%yv%~F-0DAe98pp)hZi#cMg)t5O zdu9Or_`13G@CxAuWxEKNl?;6*y5$V*W&g!&kEx&JmNutbp@hM(nLhNrdJ?KtObYyK zUN`!%o^I!E%4{Gd%huk2tg>*PkQS!c@_XR1fIbDo%lH?PirG%MfqLm z{sy)^l)G6J)Il({Hiv8&UFnfsXnc6WLwR`cN&p!j9h1wqv&O@n+F@gC7seFz8%tYKBqp@orX-#WI(EdlI31?^6)7 zQxZn*tVV?`aSk}SCmWWQj737#zO}TZNlP+6I#c%yf=i`RP~+$7OLn=akKZi`<944y zZGL-X6{6fRq;jq0d*v-|R``ut$7 zU{h1*r3NVmR#y4b%Ri7PL-nBp5%lHjvW#HW{{L|kfV<^BF!cZynz%+BIv@{)Rr?cR zSos*#_HOw~PqMx+@t{27`HGo6ICoGgTeW{SU&e+CZ%8?0YTLXV9af zqcLguwX4-faTJU^j=iON{_(}KCH{CK+bq56qk%%Yy5^%me5uit0&-ly zG^CJJ2$)_hB*%3-lr)fd`soH@yuInD1B6||TZunCy^;9Ra3B%-5-6c0oW2I+c~mPt zF9CT)|8&xpuZS0Yb|b;&YgRYFMN{A=S4|-lHj%*|)P+#5&EyZb24QykW-=1_Ts*$I z*~Q~oy1H=6oDHF+^bsZkx0p<1Zc$9c(yLoYj936aCHcZ)>5#2tkbZ2?(nc1|vaKZ6 z;9{=Iko;>aNkaDp+xy!*vwgG8#5Q-kXSR>FYiz=gx?L<)|FwhkG%O4Hieg4D-{I;v zd)s=^E=6Rd!7gH&S47g3U^VX~K9m#_e7NI^nZ}`plrZvhF&Sf!grdniNxE54s-CM@ zD!h85#MLXx6#3E{B_!TmTL?`pB{+(}B4oaW(3_=5yd*!r@?l98Rec+rztIcc-q zA0ZgBsu~vYf4ocsDu{6yA>@CQL4*E8^C}3oc9Cn5`tubGS}PzYJ#bE5H86+6D;f3> zF{sE@m*RJc?VS%fp7jb>>!b~*jiP(GyYjn~SEpFao_18K7;l6AOgL zne?Tjm{Z0y#fUT6B_Mb(1uu9?mBb{|x*D}g&YWc_Tb4{CY6(6?si16~%qd~?P_5>Z zAfK3mhurCwUH@ zU_AY&ah|6=V#$BzSd=lKQ49#-71IR++?as_91MckRb?sKu3D0j{r~PNTYHKHBInB4 z5#Kx7=vT@8s8m{VnmC~)$G9phso#RI$1T=%28upvi@kxk>lO=0Lop9qY^{J~D#g(t z^5Q>}Ce*P6)LmB|C5n@CB<^XCeTUd1_t=ZGP`Aq-YeBMyJ!YLUn@dP1MHA%v%ho)b z2U~4|!uO1HzS>~(Bwg{nPU8CX0Ws0ThP)9}RZoBLKxisvowpy``$}%`5jFz9(wviy8pNHg-Pc<^X#zy{>sg>&^ zMo~2HCi8hv7A?I*f;=3{h36T(TEHPR;hcO87$?D7=U8w4R^nynluj8-D=soDX+BpS zq7SY0D27<;5Hbc=82s!uCN?=+h#6xn9>sWT1CsSw78U+Y$w6k*%o3Fe|EeUePqh%^ z{*h=({>nC>Hp?{JBbyLuTZlt%#kqcYxF&6?iN5Vc>W0O#bBf|juIJ5Y0q0k{zi|7w zAcuNt{K9Sjg`J8Lbfd7V&QbTHcwNqo=XwaXKmuG75V%! 
ztw^&nvNIPNR~24nqSQo@u3tvae8Hkv-oW}0f2|5gMETnW7UgD%`!tfCc-9FlsgYq{ zt`M@&ceOkQ`Fq2fu!>V7=z~T#jr~4nQac15MdxcXIjjzb`OvnnSe4el%@st^Uq06g zpH*oDjroEETOX|ESRb1Cg?7ml?ij)Y-sKDlimPH7scSen2#&3lt1ym9{`j;`T*vWl z^um{HGx9L-|M-z2&(e#HjA^fA>iZQjMuOaXaYp*nr$1**w~cU1#}bq+Z@?j8bXZ zFDx`IyU z9h-v)U$yzE;VY>Zuy9oyrW(T5)g}5$<(p6Bhub5)XfPDE)Bmm;&lvat~OMCSA zbWVK-TtI`0z#^p)#c zY4UT!wr`&Wu9XLyf3~4qk?kQu`fS%wW?S%~kb+Bc?2FKsH`wLLuZ;UF?^MFXPDZ&_ zt~~pM_Wu=Br#}5Pw3AFa@H#t2wJ|WpqSFBjlvc98vZ4%vl}^Qesp3>w@&`^d)ICv(l9lR)3O+3(1j6y73Od z>8vYh&=$X63!;j^L1RwwVS}O&kF$LD~Fst`rt|OGL zY-hKU7h9F-vw7h@;yz=2O=Z`|*owYpmBBR4_NedKBZhe>IfhelMP03ddxNtE+qUVT z6xD~`v3XJ2UE<(ld0%}Dt}FAWx9_kUSw8aIZI?eeYn2n**V~lG8`Ty|R@$=k1xohx z`;q9D%m2x$zVA^BWYvaa1?iW9ew|IaP9L>c7N##+l#xA0YU5V5zkNmn?y;OAMUZ1^m+}gyhreNOFYGYt)ap9Ntix^=Wz|EDfb!k3$=9leT~`JE$2CQ_ Date: Fri, 5 Apr 2024 11:13:57 +0200 Subject: [PATCH 017/147] fix(thermal): change validation on empty matrices --- antarest/study/business/areas/thermal_management.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index 8444065a6a..7661c8993c 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -375,7 +375,7 @@ def validate_series(self, study: Study, area_id: str, cluster_id: str) -> bool: matrix = self.storage_service.get_storage(study).get(study, matrix_path) matrix_data = matrix["data"] matrix_length = len(matrix_data) - if matrix_length > 0 and matrix_length != 8760: + if matrix_data != [[]] and matrix_length != 8760: raise IncoherenceBetweenMatricesLength( f"The matrix {Path(matrix_path).name} should have 8760 rows, currently: {matrix_length}" ) From 449162971307696b988970bc468f4fcc7c93840b Mon Sep 17 00:00:00 2001 From: belthlemar Date: Tue, 9 Apr 2024 14:25:05 +0200 Subject: [PATCH 018/147] refactor(thermal): enhance exception msg --- .../business/areas/thermal_management.py | 38 +++++++++++-------- .../storage/study_upgrader/upgrader_870.py | 2 +- .../model/command/create_cluster.py | 15 ++------ antarest/study/web/study_data_blueprint.py | 1 - .../study_data_blueprint/test_thermal.py | 6 +-- .../business/test_study_version_upgrader.py | 12 ++++-- 6 files changed, 37 insertions(+), 37 deletions(-) diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index 7661c8993c..b08a17a391 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -364,28 +364,34 @@ def duplicate_cluster( return ThermalClusterOutput(**new_config.dict(by_alias=False)) def validate_series(self, study: Study, area_id: str, cluster_id: str) -> bool: - cluster_id_lowered = cluster_id.lower() - matrices_path = [f"input/thermal/series/{area_id}/{cluster_id_lowered}/series"] + lower_cluster_id = cluster_id.lower() + thermal_cluster_path = Path(f"input/thermal/series/{area_id}/{lower_cluster_id}") + series_path = [thermal_cluster_path / "series"] if int(study.version) >= 870: - matrices_path.append(f"input/thermal/series/{area_id}/{cluster_id_lowered}/CO2Cost") - matrices_path.append(f"input/thermal/series/{area_id}/{cluster_id_lowered}/fuelCost") + series_path.append(thermal_cluster_path / "CO2Cost") + series_path.append(thermal_cluster_path / "fuelCost") - matrices_width = [] - for matrix_path in matrices_path: - matrix = self.storage_service.get_storage(study).get(study, matrix_path) + ts_widths = {} + 
for ts_path in series_path: + matrix = self.storage_service.get_storage(study).get(study, str(ts_path)) matrix_data = matrix["data"] - matrix_length = len(matrix_data) - if matrix_data != [[]] and matrix_length != 8760: + matrix_height = len(matrix_data) + # We ignore empty matrices as there are default matrices for the simulator. + if matrix_data != [[]] and matrix_height != 8760: raise IncoherenceBetweenMatricesLength( - f"The matrix {Path(matrix_path).name} should have 8760 rows, currently: {matrix_length}" + f"The matrix {ts_path.name} should have 8760 rows, currently: {matrix_height}" ) - matrices_width.append(len(matrix_data[0])) - comparison_set = set(matrices_width) - comparison_set.discard(0) - comparison_set.discard(1) - if len(comparison_set) > 1: + matrix_width = len(matrix_data[0]) + if matrix_width > 1: + ts_widths[matrix_width] = ts_path.name + + if len(ts_widths) > 1: + # fmt: off + (first_matrix_width, first_matrix_name), (second_matrix_width, second_matrix_name) = list(ts_widths.items())[:2] + # fmt: on raise IncoherenceBetweenMatricesLength( - f"Matrix columns mismatch in thermal cluster '{cluster_id}' series. Columns size are {matrices_width}" + f"Column mismatch : The '{first_matrix_name}' matrix has {first_matrix_width} columns " + f"while the '{second_matrix_name}' matrix has {second_matrix_width}." ) return True diff --git a/antarest/study/storage/study_upgrader/upgrader_870.py b/antarest/study/storage/study_upgrader/upgrader_870.py index 0635215896..9b67a77c52 100644 --- a/antarest/study/storage/study_upgrader/upgrader_870.py +++ b/antarest/study/storage/study_upgrader/upgrader_870.py @@ -54,7 +54,7 @@ def upgrade_870(study_path: Path) -> None: for ini_file_path in ini_files: data = IniReader().read(ini_file_path) area_id = ini_file_path.parent.name - for cluster in data.keys(): + for cluster in data: new_thermal_path = thermal_path / area_id / cluster.lower() (new_thermal_path / "CO2Cost.txt").touch() (new_thermal_path / "fuelCost.txt").touch() diff --git a/antarest/study/storage/variantstudy/model/command/create_cluster.py b/antarest/study/storage/variantstudy/model/command/create_cluster.py index 80b7fbe580..a884eb7b9c 100644 --- a/antarest/study/storage/variantstudy/model/command/create_cluster.py +++ b/antarest/study/storage/variantstudy/model/command/create_cluster.py @@ -115,6 +115,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: # Series identifiers are in lower case. 
series_id = cluster_id.lower() + null_matrix = self.command_context.generator_matrix_constants.get_null_matrix() new_cluster_data: JSON = { "input": { "thermal": { @@ -127,21 +128,13 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: } } }, - "series": { - self.area_id: { - series_id: {"series": self.command_context.generator_matrix_constants.get_null_matrix()} - } - }, + "series": {self.area_id: {series_id: {"series": null_matrix}}}, } } } if study_data.config.version >= 870: - new_cluster_data["input"]["thermal"]["series"][self.area_id][series_id][ - "CO2Cost" - ] = self.command_context.generator_matrix_constants.get_null_matrix() - new_cluster_data["input"]["thermal"]["series"][self.area_id][series_id][ - "fuelCost" - ] = self.command_context.generator_matrix_constants.get_null_matrix() + new_cluster_data["input"]["thermal"]["series"][self.area_id][series_id]["CO2Cost"] = null_matrix + new_cluster_data["input"]["thermal"]["series"][self.area_id][series_id]["fuelCost"] = null_matrix study_data.tree.save(new_cluster_data) return output diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index 53935986b0..d75488f49e 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -1898,7 +1898,6 @@ def redirect_update_thermal_cluster( path="/studies/{uuid}/areas/{area_id}/clusters/thermal/{cluster_id}/validate", tags=[APITag.study_data], summary="Validates the thermal cluster series", - response_model=None, ) def validate_cluster_series( uuid: str, diff --git a/tests/integration/study_data_blueprint/test_thermal.py b/tests/integration/study_data_blueprint/test_thermal.py index 88aa5b7e5f..00fcc25aff 100644 --- a/tests/integration/study_data_blueprint/test_thermal.py +++ b/tests/integration/study_data_blueprint/test_thermal.py @@ -609,10 +609,8 @@ def test_lifecycle( assert res.status_code == 422 obj = res.json() assert obj["exception"] == "IncoherenceBetweenMatricesLength" - assert ( - obj["description"] - == "Matrix columns mismatch in thermal cluster 'FR_Gas conventional' series. 
Columns size are [4, 3, 1]" - ) + pattern = ".*'series'.*4.*'CO2Cost'.*3" + assert re.match(pattern, obj["description"]) # ============================= # THERMAL CLUSTER DELETION diff --git a/tests/storage/business/test_study_version_upgrader.py b/tests/storage/business/test_study_version_upgrader.py index f0c0cee009..2d85c7f718 100644 --- a/tests/storage/business/test_study_version_upgrader.py +++ b/tests/storage/business/test_study_version_upgrader.py @@ -1,5 +1,6 @@ import filecmp import glob +import os import re import shutil import zipfile @@ -163,7 +164,7 @@ def assert_inputs_are_updated(tmp_path: Path, old_area_values: dict, old_binding path_txt = Path(txt) old_txt = str(Path(path_txt.parent.name).joinpath(path_txt.stem)).replace("_parameters", "") df = pandas.read_csv(txt, sep="\t", header=None) - assert df.values.all() == old_area_values[old_txt].iloc[:, 2:8].values.all() + assert df.to_numpy().all() == old_area_values[old_txt].iloc[:, 2:8].values.all() capacities = glob.glob(str(folder_path / "capacities" / "*")) for direction_txt in capacities: df_capacities = pandas.read_csv(direction_txt, sep="\t", header=None) @@ -206,7 +207,7 @@ def assert_inputs_are_updated(tmp_path: Path, old_area_values: dict, old_binding for k, term in enumerate(["lt", "gt", "eq"]): term_path = input_path / "bindingconstraints" / f"{bd_id}_{term}.txt" df = pandas.read_csv(term_path, sep="\t", header=None) - assert df.values.all() == old_binding_constraint_values[bd_id].iloc[:, k].values.all() + assert df.to_numpy().all() == old_binding_constraint_values[bd_id].iloc[:, k].values.all() # thermal cluster part for area in list_areas: @@ -214,8 +215,11 @@ def assert_inputs_are_updated(tmp_path: Path, old_area_values: dict, old_binding thermal_series_path = tmp_path / "input" / "thermal" / "series" / area thermal_cluster_list = reader.read(tmp_path / "input" / "thermal" / "clusters" / area / "list.ini") for cluster in thermal_cluster_list: - assert (thermal_series_path / cluster.lower() / "fuelCost.txt").exists() - assert (thermal_series_path / cluster.lower() / "CO2Cost.txt").exists() + fuel_cost_path = thermal_series_path / cluster.lower() / "fuelCost.txt" + co2_cost_path = thermal_series_path / cluster.lower() / "CO2Cost.txt" + for path in [fuel_cost_path, co2_cost_path]: + assert path.exists() + assert os.path.getsize(path) == 0 assert thermal_cluster_list[cluster]["costgeneration"] == "SetManually" assert thermal_cluster_list[cluster]["efficiency"] == 100 assert thermal_cluster_list[cluster]["variableomcost"] == 0 From 3e9307bff5a57d948874392c1d782f70ad3b9fe7 Mon Sep 17 00:00:00 2001 From: belthlemar Date: Tue, 9 Apr 2024 14:29:03 +0200 Subject: [PATCH 019/147] refactor(thermal): change exc message --- antarest/study/business/areas/thermal_management.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index b08a17a391..cc1fecd205 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -391,7 +391,7 @@ def validate_series(self, study: Study, area_id: str, cluster_id: str) -> bool: # fmt: on raise IncoherenceBetweenMatricesLength( f"Column mismatch : The '{first_matrix_name}' matrix has {first_matrix_width} columns " - f"while the '{second_matrix_name}' matrix has {second_matrix_width}." + f"whereas the '{second_matrix_name}' matrix has {second_matrix_width}." 
) return True From cc28d51bfa72d5d49d5a5c2b80583198c4a04f30 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 9 Apr 2024 15:26:28 +0200 Subject: [PATCH 020/147] refactor(thermal): improve matrix widths mismatch error message --- .../business/areas/thermal_management.py | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index cc1fecd205..fc24c12f13 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -371,7 +371,7 @@ def validate_series(self, study: Study, area_id: str, cluster_id: str) -> bool: series_path.append(thermal_cluster_path / "CO2Cost") series_path.append(thermal_cluster_path / "fuelCost") - ts_widths = {} + ts_widths: t.MutableMapping[int, t.MutableSequence[str]] = {} for ts_path in series_path: matrix = self.storage_service.get_storage(study).get(study, str(ts_path)) matrix_data = matrix["data"] @@ -383,15 +383,17 @@ def validate_series(self, study: Study, area_id: str, cluster_id: str) -> bool: ) matrix_width = len(matrix_data[0]) if matrix_width > 1: - ts_widths[matrix_width] = ts_path.name + ts_widths.setdefault(matrix_width, []).append(ts_path.name) if len(ts_widths) > 1: - # fmt: off - (first_matrix_width, first_matrix_name), (second_matrix_width, second_matrix_name) = list(ts_widths.items())[:2] - # fmt: on - raise IncoherenceBetweenMatricesLength( - f"Column mismatch : The '{first_matrix_name}' matrix has {first_matrix_width} columns " - f"whereas the '{second_matrix_name}' matrix has {second_matrix_width}." - ) + messages = [] + for width, name_list in ts_widths.items(): + names = ", ".join([f"'{name}'" for name in name_list]) + message = { + 1: f"matrix {names} has {width} columns", + 2: f"matrices {names} have {width} columns", + }[min(2, len(name_list))] + messages.append(message) + raise IncoherenceBetweenMatricesLength("Mismatch widths: " + "; ".join(messages)) return True From a53562b486e5a395a25d0158cb2ddd75bedfadab Mon Sep 17 00:00:00 2001 From: belthlemar Date: Tue, 9 Apr 2024 17:03:44 +0200 Subject: [PATCH 021/147] fix(thermal): use posix path for windows --- antarest/study/business/areas/thermal_management.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index fc24c12f13..4a17cbe37c 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -373,7 +373,7 @@ def validate_series(self, study: Study, area_id: str, cluster_id: str) -> bool: ts_widths: t.MutableMapping[int, t.MutableSequence[str]] = {} for ts_path in series_path: - matrix = self.storage_service.get_storage(study).get(study, str(ts_path)) + matrix = self.storage_service.get_storage(study).get(study, ts_path.as_posix()) matrix_data = matrix["data"] matrix_height = len(matrix_data) # We ignore empty matrices as there are default matrices for the simulator. 
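The one-line change in the patch above swaps `str(ts_path)` for `ts_path.as_posix()`. The fix targets Windows, where `str()` on a `pathlib.Path` renders backslash separators, whereas `as_posix()` always yields forward slashes — presumably what the study storage tree expects as a lookup key. A minimal sketch of the difference (illustration only, not part of the patch series; the storage key shown is made up):

    from pathlib import PureWindowsPath

    # Hypothetical storage key, for illustration only.
    ts_path = PureWindowsPath("input/thermal/series/fr/cluster 1/series")
    print(str(ts_path))        # input\thermal\series\fr\cluster 1\series
    print(ts_path.as_posix())  # input/thermal/series/fr/cluster 1/series
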
From e7cb3aa1c3b3385d6c6f15d41d0ba93b4c2a8b0f Mon Sep 17 00:00:00 2001 From: MartinBelthle <102529366+MartinBelthle@users.noreply.github.com> Date: Mon, 15 Apr 2024 21:31:29 +0200 Subject: [PATCH 022/147] fix(bc): change exception messages for binding constraints validation (#2009) --- antarest/core/exceptions.py | 11 ++- .../business/areas/thermal_management.py | 7 +- .../business/binding_constraint_management.py | 95 +++++++++++++------ .../command/create_binding_constraint.py | 22 ++--- .../command/update_binding_constraint.py | 2 - .../test_binding_constraints.py | 92 ++++++++++++++---- .../study_data_blueprint/test_thermal.py | 6 +- 7 files changed, 158 insertions(+), 77 deletions(-) diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py index 9094d322be..378c1dd093 100644 --- a/antarest/core/exceptions.py +++ b/antarest/core/exceptions.py @@ -385,9 +385,14 @@ def __init__(self, message: str) -> None: super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message) -class IncoherenceBetweenMatricesLength(HTTPException): - def __init__(self, detail: Any) -> None: - super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, detail) +class MatrixWidthMismatchError(HTTPException): + def __init__(self, message: str) -> None: + super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message) + + +class WrongMatrixHeightError(HTTPException): + def __init__(self, message: str) -> None: + super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message) class MissingDataError(HTTPException): diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index 4a17cbe37c..88281a7be5 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -6,9 +6,10 @@ from antarest.core.exceptions import ( DuplicateThermalCluster, - IncoherenceBetweenMatricesLength, + MatrixWidthMismatchError, ThermalClusterConfigNotFound, ThermalClusterNotFound, + WrongMatrixHeightError, ) from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands from antarest.study.model import Study @@ -378,7 +379,7 @@ def validate_series(self, study: Study, area_id: str, cluster_id: str) -> bool: matrix_height = len(matrix_data) # We ignore empty matrices as there are default matrices for the simulator. 
if matrix_data != [[]] and matrix_height != 8760: - raise IncoherenceBetweenMatricesLength( + raise WrongMatrixHeightError( f"The matrix {ts_path.name} should have 8760 rows, currently: {matrix_height}" ) matrix_width = len(matrix_data[0]) @@ -394,6 +395,6 @@ def validate_series(self, study: Study, area_id: str, cluster_id: str) -> bool: 2: f"matrices {names} have {width} columns", }[min(2, len(name_list))] messages.append(message) - raise IncoherenceBetweenMatricesLength("Mismatch widths: " + "; ".join(messages)) + raise MatrixWidthMismatchError("Mismatch widths: " + "; ".join(messages)) return True diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py index ac012ed0e0..8c44784920 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -1,7 +1,7 @@ import collections import itertools import logging -from typing import Any, Dict, List, Mapping, MutableSequence, Optional, Sequence, Union +from typing import Any, Dict, List, Mapping, MutableSequence, Optional, Sequence, Tuple, Union import numpy as np from pydantic import BaseModel, Field, root_validator, validator @@ -12,11 +12,12 @@ ConstraintAlreadyExistError, ConstraintIdNotFoundError, DuplicateConstraintName, - IncoherenceBetweenMatricesLength, InvalidConstraintName, InvalidFieldForVersionError, + MatrixWidthMismatchError, MissingDataError, NoConstraintError, + WrongMatrixHeightError, ) from antarest.core.utils.string import to_camel_case from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands @@ -39,6 +40,7 @@ ) from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator from antarest.study.storage.variantstudy.model.command.create_binding_constraint import ( + EXPECTED_MATRIX_SHAPES, BindingConstraintMatrices, BindingConstraintProperties, BindingConstraintProperties870, @@ -255,11 +257,7 @@ def check_matrices_dimensions(cls, values: Dict[str, Any]) -> Dict[str, Any]: # Also, we use the same matrices for "weekly" and "daily" frequencies, # because the solver calculates the weekly matrix from the daily matrix. # See https://github.com/AntaresSimulatorTeam/AntaREST/issues/1843 - expected_rows = { - BindingConstraintFrequency.HOURLY: 8784, - BindingConstraintFrequency.DAILY: 366, - BindingConstraintFrequency.WEEKLY: 366, - }[_time_step] + expected_rows = EXPECTED_MATRIX_SHAPES[_time_step][0] # Collect the matrix shapes matrix_shapes = {} @@ -311,38 +309,72 @@ class ConstraintOutput870(ConstraintOutputBase): ConstraintOutput = Union[ConstraintOutputBase, ConstraintOutput870] -def _validate_binding_constraints(file_study: FileStudy, bcs: Sequence[ConstraintOutput]) -> bool: +def _get_references_by_widths( + file_study: FileStudy, bcs: Sequence[ConstraintOutput] +) -> Mapping[int, Sequence[Tuple[str, str]]]: + """ + Iterates over each BC and its associated matrices. + For each matrix, it checks its width according to the expected matrix shapes. + It then groups the binding constraints by these widths. + + Notes: + The height of the matrices may vary depending on the time step, + but the width should be consistent within a group of binding constraints. 
+ """ if int(file_study.config.version) < 870: matrix_id_fmts = {"{bc_id}"} else: matrix_id_fmts = {"{bc_id}_eq", "{bc_id}_lt", "{bc_id}_gt"} - references_by_shapes = collections.defaultdict(list) + references_by_width: Dict[int, List[Tuple[str, str]]] = {} _total = len(bcs) * len(matrix_id_fmts) for _index, (bc, fmt) in enumerate(itertools.product(bcs, matrix_id_fmts), 1): + bc_id = bc.id matrix_id = fmt.format(bc_id=bc.id) - logger.info(f"⏲ Validating BC '{bc.id}': {matrix_id=} [{_index}/{_total}]") - _obj = file_study.tree.get(url=["input", "bindingconstraints", matrix_id]) - _array = np.array(_obj["data"], dtype=float) - if _array.size == 0 or _array.shape[1] == 1: + logger.info(f"⏲ Validating BC '{bc_id}': {matrix_id=} [{_index}/{_total}]") + obj = file_study.tree.get(url=["input", "bindingconstraints", matrix_id]) + matrix = np.array(obj["data"], dtype=float) + # We ignore empty matrices as there are default matrices for the simulator. + if not matrix.size: continue - references_by_shapes[_array.shape].append((bc.id, matrix_id)) - del _obj - del _array - - if len(references_by_shapes) > 1: - most_common = collections.Counter(references_by_shapes.keys()).most_common() - invalid_constraints = collections.defaultdict(list) - for shape, _ in most_common[1:]: - references = references_by_shapes[shape] + + matrix_height = matrix.shape[0] + expected_height = EXPECTED_MATRIX_SHAPES[bc.time_step][0] + if matrix_height != expected_height: + raise WrongMatrixHeightError( + f"The binding constraint '{bc.name}' should have {expected_height} rows, currently: {matrix_height}" + ) + matrix_width = matrix.shape[1] + if matrix_width > 1: + references_by_width.setdefault(matrix_width, []).append((bc_id, matrix_id)) + + return references_by_width + + +def _validate_binding_constraints(file_study: FileStudy, bcs: Sequence[ConstraintOutput]) -> bool: + """ + Validates the binding constraints within a group. + """ + references_by_widths = _get_references_by_widths(file_study, bcs) + + if len(references_by_widths) > 1: + most_common = collections.Counter(references_by_widths.keys()).most_common() + invalid_constraints: Dict[str, str] = {} + + for width, _ in most_common[1:]: + references = references_by_widths[width] for bc_id, matrix_id in references: - invalid_constraints[bc_id].append(f"'{matrix_id}' {shape}") - expected_shape = most_common[0][0] - detail = { - "msg": f"Matrix shapes mismatch in binding constraints group. 
Expected shape: {expected_shape}", - "invalid_constraints": dict(invalid_constraints), - } - raise IncoherenceBetweenMatricesLength(detail) + existing_key = invalid_constraints.get(bc_id, "") + if existing_key: + existing_key += ", " + existing_key += f"'{matrix_id}' has {width} columns" + invalid_constraints[bc_id] = existing_key + + expected_width = most_common[0][0] + raise MatrixWidthMismatchError( + f"Mismatch widths: the most common width in the group is {expected_width}" + f" but we have: {invalid_constraints!r}" + ) return True @@ -618,11 +650,12 @@ def validate_constraint_groups(self, study: Study) -> bool: for group_name, bcs in grouped_constraints.items(): try: _validate_binding_constraints(file_study, bcs) - except IncoherenceBetweenMatricesLength as e: + except MatrixWidthMismatchError as e: invalid_groups[group_name] = e.detail if invalid_groups: - raise IncoherenceBetweenMatricesLength(invalid_groups) + err_msg = ", ".join(f"'{grp}': {msg}" for grp, msg in sorted(invalid_groups.items())) + raise MatrixWidthMismatchError(err_msg) return True diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py index 7e1b4d3eaa..4bdfc714c4 100644 --- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py @@ -22,17 +22,14 @@ from antarest.study.storage.variantstudy.model.command.icommand import MATCH_SIGNATURE_SEPARATOR, ICommand from antarest.study.storage.variantstudy.model.model import CommandDTO -__all__ = ( - "AbstractBindingConstraintCommand", - "CreateBindingConstraint", - "check_matrix_values", - "BindingConstraintProperties", - "BindingConstraintProperties870", - "BindingConstraintMatrices", -) - MatrixType = t.List[t.List[MatrixData]] +EXPECTED_MATRIX_SHAPES = { + BindingConstraintFrequency.HOURLY: (8784, 3), + BindingConstraintFrequency.DAILY: (366, 3), + BindingConstraintFrequency.WEEKLY: (366, 3), +} + def check_matrix_values(time_step: BindingConstraintFrequency, values: MatrixType, version: int) -> None: """ @@ -53,14 +50,9 @@ def check_matrix_values(time_step: BindingConstraintFrequency, values: MatrixTyp # Also, we use the same matrices for "weekly" and "daily" frequencies, # because the solver calculates the weekly matrix from the daily matrix. 
# See https://github.com/AntaresSimulatorTeam/AntaREST/issues/1843 - shapes = { - BindingConstraintFrequency.HOURLY: (8784, 3), - BindingConstraintFrequency.DAILY: (366, 3), - BindingConstraintFrequency.WEEKLY: (366, 3), - } # Check the matrix values and create the corresponding matrix link array = np.array(values, dtype=np.float64) - expected_shape = shapes[time_step] + expected_shape = EXPECTED_MATRIX_SHAPES[time_step] actual_shape = array.shape if version < 870: if actual_shape != expected_shape: diff --git a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py index 211ea2f848..f1218d86fc 100644 --- a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py @@ -10,8 +10,6 @@ from antarest.study.storage.variantstudy.model.command.icommand import MATCH_SIGNATURE_SEPARATOR, ICommand from antarest.study.storage.variantstudy.model.model import CommandDTO -__all__ = ("UpdateBindingConstraint",) - MatrixType = List[List[MatrixData]] diff --git a/tests/integration/study_data_blueprint/test_binding_constraints.py b/tests/integration/study_data_blueprint/test_binding_constraints.py index 8f6fa015e4..ed86cba404 100644 --- a/tests/integration/study_data_blueprint/test_binding_constraints.py +++ b/tests/integration/study_data_blueprint/test_binding_constraints.py @@ -1,4 +1,8 @@ +import io +import re + import numpy as np +import pandas as pd import pytest from starlette.testclient import TestClient @@ -62,6 +66,21 @@ def test_constraint_id__other(self) -> None: assert term.generate_id() == "foo" +def _upload_matrix( + client: TestClient, user_access_token: str, study_id: str, matrix_path: str, df: pd.DataFrame +) -> None: + tsv = io.BytesIO() + df.to_csv(tsv, sep="\t", index=False, header=False) + tsv.seek(0) + res = client.put( + f"/v1/studies/{study_id}/raw", + params={"path": matrix_path}, + headers={"Authorization": f"Bearer {user_access_token}"}, + files={"file": tsv}, + ) + res.raise_for_status() + + @pytest.mark.unit_test class TestBindingConstraints: """ @@ -802,9 +821,8 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud assert "(8784, 2)" in description # - # Creation of 2 BC inside the same group with different columns size - # "First BC": 3 cols, "Group 1" -> OK - # "Second BC": 4 cols, "Group 1" -> OK, but should fail in group validation + # Creation of 1 BC + # Update raw with wrong columns size -> OK but validation should fail # matrix_lt3 = np.ones((8784, 3)) @@ -819,6 +837,47 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud headers=admin_headers, ) assert res.status_code in {200, 201}, res.json() + first_bc_id = res.json()["id"] + + generator = np.random.default_rng(11) + random_matrix = pd.DataFrame(generator.integers(0, 10, size=(4, 1))) + _upload_matrix( + client, + admin_access_token, + study_id, + f"input/bindingconstraints/{first_bc_id}_gt", + random_matrix, + ) + + # Validation should fail + res = client.get( + f"/v1/studies/{study_id}/constraint-groups/Group 1/validate", + headers=admin_headers, + ) + assert res.status_code == 422 + obj = res.json() + assert obj["exception"] == "WrongMatrixHeightError" + assert obj["description"] == "The binding constraint 'First BC' should have 8784 rows, currently: 4" + + # So, we correct the shape of the matrix + res = client.put( + 
f"/v1/studies/{study_id}/bindingconstraints/{first_bc_id}", + json={"greater_term_matrix": matrix_lt3.tolist()}, + headers=admin_headers, + ) + assert res.status_code in {200, 201}, res.json() + + # + # Creation of another BC inside the same group with different columns size + # "Second BC": 4 cols, "Group 1" -> OK, but should fail in group validation + # + + # Asserts everything is ok. + res = client.get( + f"/v1/studies/{study_id}/constraint-groups/Group 1/validate", + headers=admin_headers, + ) + assert res.status_code == 200, res.json() matrix_gt4 = np.ones((8784, 4)) # Wrong number of columns res = client.post( @@ -837,12 +896,10 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud # validate the BC group "Group 1" res = client.get(f"/v1/studies/{study_id}/constraint-groups/Group 1/validate", headers=admin_headers) assert res.status_code == 422, res.json() - assert res.json()["exception"] == "IncoherenceBetweenMatricesLength" + assert res.json()["exception"] == "MatrixWidthMismatchError" description = res.json()["description"] - assert description == { - "invalid_constraints": {"second bc": ["'second bc_gt' (8784, 4)"]}, - "msg": "Matrix shapes mismatch in binding constraints group. Expected shape: (8784, 3)", - } + assert re.search(r"the most common width in the group is 3", description, flags=re.IGNORECASE) + assert re.search(r"'second bc_gt' has 4 columns", description, flags=re.IGNORECASE) # So, we correct the shape of the matrix of the Second BC res = client.put( @@ -884,12 +941,10 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud # validate the BC group "Group 1" res = client.get(f"/v1/studies/{study_id}/constraint-groups/Group 1/validate", headers=admin_headers) assert res.status_code == 422, res.json() - assert res.json()["exception"] == "IncoherenceBetweenMatricesLength" + assert res.json()["exception"] == "MatrixWidthMismatchError" description = res.json()["description"] - assert description == { - "invalid_constraints": {"third bc": ["'third bc_lt' (8784, 4)"]}, - "msg": "Matrix shapes mismatch in binding constraints group. Expected shape: (8784, 3)", - } + assert re.search(r"the most common width in the group is 3", description, flags=re.IGNORECASE) + assert re.search(r"'third bc_lt' has 4 columns", description, flags=re.IGNORECASE) # So, we correct the shape of the matrix of the Second BC res = client.put( @@ -949,10 +1004,7 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud assert res.status_code == 422, res.json() exception = res.json()["exception"] description = res.json()["description"] - assert exception == "IncoherenceBetweenMatricesLength" - assert description == { - "Group 1": { - "msg": "Matrix shapes mismatch in binding constraints group. 
Expected shape: (8784, 3)", - "invalid_constraints": {"third bc": ["'third bc_lt' (8784, 4)"]}, - } - } + assert exception == "MatrixWidthMismatchError" + assert re.search(r"'Group 1':", description, flags=re.IGNORECASE) + assert re.search(r"the most common width in the group is 3", description, flags=re.IGNORECASE) + assert re.search(r"'third bc_lt' has 4 columns", description, flags=re.IGNORECASE) diff --git a/tests/integration/study_data_blueprint/test_thermal.py b/tests/integration/study_data_blueprint/test_thermal.py index 00fcc25aff..e3f62eca1e 100644 --- a/tests/integration/study_data_blueprint/test_thermal.py +++ b/tests/integration/study_data_blueprint/test_thermal.py @@ -571,7 +571,7 @@ def test_lifecycle( ) assert res.status_code == 422 obj = res.json() - assert obj["exception"] == "IncoherenceBetweenMatricesLength" + assert obj["exception"] == "WrongMatrixHeightError" assert obj["description"] == "The matrix series should have 8760 rows, currently: 4" # Update with the right length @@ -608,8 +608,8 @@ def test_lifecycle( ) assert res.status_code == 422 obj = res.json() - assert obj["exception"] == "IncoherenceBetweenMatricesLength" - pattern = ".*'series'.*4.*'CO2Cost'.*3" + assert obj["exception"] == "MatrixWidthMismatchError" + pattern = r".*'series'.*4.*'CO2Cost'.*3" assert re.match(pattern, obj["description"]) # ============================= From 33ae79a231f3d2685bdab019f0b7339857954eb2 Mon Sep 17 00:00:00 2001 From: MartinBelthle <102529366+MartinBelthle@users.noreply.github.com> Date: Tue, 16 Apr 2024 15:01:18 +0200 Subject: [PATCH 023/147] feat(simulator): API change to add support for study version 8.8 (#2006) --- .../business/areas/st_storage_management.py | 57 +++++++++--------- .../business/thematic_trimming_field_infos.py | 2 +- antarest/study/model.py | 3 +- .../model/filesystem/config/st_storage.py | 47 +++++++++++---- .../study/storage/study_upgrader/__init__.py | 22 ++++--- .../storage/study_upgrader/upgrader_880.py | 32 ++++++++++ resources/empty_study_880.zip | Bin 0 -> 64609 bytes .../study_data_blueprint/test_st_storage.py | 33 ++++++++-- .../business/test_study_version_upgrader.py | 20 +++++- .../study_upgrader/test_upgrade_880.py | 17 ++++++ .../little_study_860.expected.zip | Bin 128048 -> 128003 bytes .../little_study_870.expected.zip | Bin 0 -> 135401 bytes .../nominal_case/little_study_870.zip | Bin 0 -> 135280 bytes .../areas/test_st_storage_management.py | 4 ++ 14 files changed, 179 insertions(+), 58 deletions(-) create mode 100644 antarest/study/storage/study_upgrader/upgrader_880.py create mode 100644 resources/empty_study_880.zip create mode 100644 tests/storage/study_upgrader/test_upgrade_880.py create mode 100644 tests/storage/study_upgrader/upgrade_880/nominal_case/little_study_870.expected.zip create mode 100644 tests/storage/study_upgrader/upgrade_880/nominal_case/little_study_870.zip diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py index c6d4e9f868..85ceb41f1c 100644 --- a/antarest/study/business/areas/st_storage_management.py +++ b/antarest/study/business/areas/st_storage_management.py @@ -19,9 +19,10 @@ from antarest.study.model import Study from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import ( - STStorageConfig, + STStorage880Config, + STStorage880Properties, + STStorageConfigType, STStorageGroup, - STStorageProperties, 
create_st_storage_config, ) from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy @@ -32,18 +33,9 @@ from antarest.study.storage.variantstudy.model.command.replace_matrix import ReplaceMatrix from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig -__all__ = ( - "STStorageManager", - "STStorageCreation", - "STStorageInput", - "STStorageOutput", - "STStorageMatrix", - "STStorageTimeSeries", -) - @camel_case_model -class STStorageInput(STStorageProperties, metaclass=AllOptionalMetaclass, use_none=True): +class STStorageInput(STStorage880Properties, metaclass=AllOptionalMetaclass, use_none=True): """ Model representing the form used to EDIT an existing short-term storage. """ @@ -79,13 +71,13 @@ def validate_name(cls, name: t.Optional[str]) -> str: return name # noinspection PyUnusedLocal - def to_config(self, study_version: t.Union[str, int]) -> STStorageConfig: + def to_config(self, study_version: t.Union[str, int]) -> STStorageConfigType: values = self.dict(by_alias=False, exclude_none=True) - return STStorageConfig(**values) + return create_st_storage_config(study_version=study_version, **values) @camel_case_model -class STStorageOutput(STStorageConfig): +class STStorageOutput(STStorage880Config): """ Model representing the form used to display the details of a short-term storage entry. """ @@ -104,12 +96,6 @@ def schema_extra(schema: t.MutableMapping[str, t.Any]) -> None: initial_level_optim=True, ) - @classmethod - def from_config(cls, storage_id: str, config: t.Mapping[str, t.Any]) -> "STStorageOutput": - storage = STStorageConfig(**config, id=storage_id) - values = storage.dict(by_alias=False) - return cls(**values) - # ============= # Time series @@ -241,6 +227,16 @@ def _get_values_by_ids(file_study: FileStudy, area_id: str) -> t.Mapping[str, t. 
raise STStorageConfigNotFound(path, area_id) from None +def create_storage_output( + study_version: t.Union[str, int], + cluster_id: str, + config: t.Mapping[str, t.Any], +) -> "STStorageOutput": + obj = create_st_storage_config(study_version=study_version, **config, id=cluster_id) + kwargs = obj.dict(by_alias=False) + return STStorageOutput(**kwargs) + + class STStorageManager: """ Manage short-term storage configuration in a study @@ -291,7 +287,7 @@ def create_storage( output = self.get_storage(study, area_id, storage_id=storage.id) return output - def _make_create_cluster_cmd(self, area_id: str, cluster: STStorageConfig) -> CreateSTStorage: + def _make_create_cluster_cmd(self, area_id: str, cluster: STStorageConfigType) -> CreateSTStorage: command = CreateSTStorage( area_id=area_id, parameters=cluster, @@ -326,11 +322,9 @@ def get_storages( # Sort STStorageConfig by groups and then by name order_by = operator.attrgetter("group", "name") - all_configs = sorted( - (STStorageConfig(id=storage_id, **options) for storage_id, options in config.items()), - key=order_by, - ) - return tuple(STStorageOutput(**config.dict(by_alias=False)) for config in all_configs) + study_version = int(study.version) + storages = [create_storage_output(study_version, storage_id, options) for storage_id, options in config.items()] + return sorted(storages, key=order_by) def get_storage( self, @@ -356,7 +350,7 @@ def get_storage( config = file_study.tree.get(path.split("/"), depth=1) except KeyError: raise STStorageNotFound(path, storage_id) from None - return STStorageOutput.from_config(storage_id, config) + return create_storage_output(int(study.version), storage_id, config) def update_storage( self, @@ -469,7 +463,12 @@ def duplicate_cluster(self, study: Study, area_id: str, source_id: str, new_clus # Cluster duplication current_cluster = self.get_storage(study, area_id, source_id) current_cluster.name = new_cluster_name - creation_form = STStorageCreation(**current_cluster.dict(by_alias=False, exclude={"id"})) + fields_to_exclude = {"id"} + # We should remove the field 'enabled' for studies before v8.8 as it didn't exist + if int(study.version) < 880: + fields_to_exclude.add("enabled") + creation_form = STStorageCreation(**current_cluster.dict(by_alias=False, exclude=fields_to_exclude)) + new_config = creation_form.to_config(study.version) create_cluster_cmd = self._make_create_cluster_cmd(area_id, new_config) diff --git a/antarest/study/business/thematic_trimming_field_infos.py b/antarest/study/business/thematic_trimming_field_infos.py index 30d95a9393..764c2c9590 100644 --- a/antarest/study/business/thematic_trimming_field_infos.py +++ b/antarest/study/business/thematic_trimming_field_infos.py @@ -191,7 +191,7 @@ class ThematicTrimmingFormFields(FormFieldsBaseModel, metaclass=AllOptionalMetac "sts_inj_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS inj by plant", "default_value": True, "start_version": 860}, "sts_withdrawal_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS withdrawal by plant", "default_value": True, "start_version": 860}, "sts_lvl_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS lvl by plant", "default_value": True, "start_version": 860}, - "sts_cashflow_by_cluster": {"topic": _SHORT_TERM_STORAGES, "path": "STS Cashflow By Cluster", "default_value": True, "start_version": 860}, + "sts_cashflow_by_cluster": {"topic": _SHORT_TERM_STORAGES, "path": "STS Cashflow By Cluster", "default_value": True, "start_version": 880}, # topic: "Short-Term Storages - Group" 
"psp_open_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_open_injection", "default_value": True, "start_version": 860}, "psp_open_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_open_withdrawal", "default_value": True, "start_version": 860}, diff --git a/antarest/study/model.py b/antarest/study/model.py index 40737debad..ad3d1f0fb4 100644 --- a/antarest/study/model.py +++ b/antarest/study/model.py @@ -46,9 +46,10 @@ "850": "empty_study_850.zip", "860": "empty_study_860.zip", "870": "empty_study_870.zip", + "880": "empty_study_880.zip", } -NEW_DEFAULT_STUDY_VERSION: str = "870" +NEW_DEFAULT_STUDY_VERSION: str = "880" class StudyGroup(Base): # type:ignore diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py b/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py index 58efc0ceb8..61a644b3a5 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py @@ -6,14 +6,6 @@ from antarest.study.storage.rawstudy.model.filesystem.config.cluster import ItemProperties from antarest.study.storage.rawstudy.model.filesystem.config.identifier import LowerCaseIdentifier -__all__ = ( - "STStorageGroup", - "STStorageProperties", - "STStorageConfig", - "STStorageConfigType", - "create_st_storage_config", -) - class STStorageGroup(EnumIgnoreCase): """ @@ -75,7 +67,7 @@ class STStorageProperties(ItemProperties): ge=0, le=1, ) - # The `initial_level` value must be between 0 and 1, but the default value is 0. + # The `initial_level` value must be between 0 and 1, but the default value is 0.5 initial_level: float = Field( 0.5, description="Initial level of the storage system (%)", @@ -90,6 +82,17 @@ class STStorageProperties(ItemProperties): ) +class STStorage880Properties(STStorageProperties): + """ + Short term storage configuration model for 880 study. + """ + + # Activity status: + # - True: the plant may generate. + # - False: Ignored by the simulator. + enabled: bool = Field(default=True, description="Activity status") + + # noinspection SpellCheckingInspection class STStorageConfig(STStorageProperties, LowerCaseIdentifier): """ @@ -116,7 +119,27 @@ class STStorageConfig(STStorageProperties, LowerCaseIdentifier): """ -STStorageConfigType = STStorageConfig +class STStorage880Config(STStorage880Properties, LowerCaseIdentifier): + """ + Short Term Storage properties for study in version 8.8 or above. + + Usage: + + >>> from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import STStorage880Config + + >>> st = STStorage880Config(name="Storage 1", group="battery", enabled=False) + >>> st.id + 'storage 1' + >>> st.group == STStorageGroup.BATTERY + True + >>> st.enabled + False + """ + + +# NOTE: In the following Union, it is important to place the older version first, +# because otherwise, creating a short term storage always creates a v8.8 one. 
+STStorageConfigType = t.Union[STStorageConfig, STStorage880Config] def create_st_storage_config(study_version: t.Union[str, int], **kwargs: t.Any) -> STStorageConfigType: @@ -136,4 +159,6 @@ def create_st_storage_config(study_version: t.Union[str, int], **kwargs: t.Any) version = int(study_version) if version < 860: raise ValueError(f"Unsupported study version: {version}") - return STStorageConfig(**kwargs) + elif version < 880: + return STStorageConfig(**kwargs) + return STStorage880Config(**kwargs) diff --git a/antarest/study/storage/study_upgrader/__init__.py b/antarest/study/storage/study_upgrader/__init__.py index 1ff75f64be..9ea738a1f3 100644 --- a/antarest/study/storage/study_upgrader/__init__.py +++ b/antarest/study/storage/study_upgrader/__init__.py @@ -7,10 +7,8 @@ from http import HTTPStatus from http.client import HTTPException from pathlib import Path -from typing import Callable, List, NamedTuple from antarest.core.exceptions import StudyValidationError - from .upgrader_710 import upgrade_710 from .upgrader_720 import upgrade_720 from .upgrader_800 import upgrade_800 @@ -21,6 +19,12 @@ from .upgrader_850 import upgrade_850 from .upgrader_860 import upgrade_860 from .upgrader_870 import upgrade_870 +from .upgrader_880 import upgrade_880 + +STUDY_ANTARES = "study.antares" +""" +Main file of an Antares study containing the caption, the version, the creation date, etc. +""" logger = logging.getLogger(__name__) @@ -47,6 +51,7 @@ class UpgradeMethod(t.NamedTuple): UpgradeMethod("840", "850", upgrade_850, [_GENERAL_DATA_PATH]), UpgradeMethod("850", "860", upgrade_860, [Path("input"), _GENERAL_DATA_PATH]), UpgradeMethod("860", "870", upgrade_870, [Path("input/thermal"), Path("input/bindingconstraints")]), + UpgradeMethod("870", "880", upgrade_880, [Path("input/st-storage/clusters")]), ] @@ -105,7 +110,7 @@ def get_current_version(study_path: Path) -> str: `study.antares` file or does not match the expected format. """ - antares_path = study_path / "study.antares" + antares_path = study_path / STUDY_ANTARES pattern = r"version\s*=\s*([\w.-]+)\s*" with antares_path.open(encoding="utf-8") as lines: for line in lines: @@ -163,8 +168,8 @@ def can_upgrade_version(from_version: str, to_version: str) -> t.List[Path]: def _update_study_antares_file(target_version: str, study_path: Path) -> None: - file = study_path / "study.antares" - content = file.read_text(encoding="utf-8") + antares_path = study_path / STUDY_ANTARES + content = antares_path.read_text(encoding="utf-8") content = re.sub( r"^version\s*=.*$", f"version = {target_version}", @@ -177,7 +182,7 @@ def _update_study_antares_file(target_version: str, study_path: Path) -> None: content, flags=re.MULTILINE, ) - file.write_text(content, encoding="utf-8") + antares_path.write_text(content, encoding="utf-8") def _copies_only_necessary_files(files_to_upgrade: t.List[Path], study_path: Path, tmp_path: Path) -> t.List[Path]: @@ -192,10 +197,13 @@ def _copies_only_necessary_files(files_to_upgrade: t.List[Path], study_path: Pat without any children that has parents already in the list. """ files_to_copy = _filters_out_children_files(files_to_upgrade) - files_to_copy.append(Path("study.antares")) + files_to_copy.append(Path(STUDY_ANTARES)) files_to_retrieve = [] for path in files_to_copy: entire_path = study_path / path + if not entire_path.exists(): + # This can happen when upgrading a study to v8.8. 
+ continue if entire_path.is_dir(): if not (tmp_path / path).exists(): shutil.copytree(entire_path, tmp_path / path, dirs_exist_ok=True) diff --git a/antarest/study/storage/study_upgrader/upgrader_880.py b/antarest/study/storage/study_upgrader/upgrader_880.py new file mode 100644 index 0000000000..0de50cff4b --- /dev/null +++ b/antarest/study/storage/study_upgrader/upgrader_880.py @@ -0,0 +1,32 @@ +import glob +from pathlib import Path + +from antarest.study.storage.rawstudy.ini_reader import IniReader +from antarest.study.storage.rawstudy.ini_writer import IniWriter +from antarest.study.storage.rawstudy.model.filesystem.root.settings.generaldata import DUPLICATE_KEYS + + +# noinspection SpellCheckingInspection +def upgrade_880(study_path: Path) -> None: + """ + Upgrade the study configuration to version 880. + + NOTE: + The file `study.antares` is not upgraded here. + + Args: + study_path: path to the study directory. + """ + st_storage_path = study_path / "input" / "st-storage" / "clusters" + if not st_storage_path.exists(): + # The folder only exists for studies in v8.6+ that have some short term storage clusters. + # For every other case, this upgrader has nothing to do. + return + writer = IniWriter(special_keys=DUPLICATE_KEYS) + cluster_files = glob.glob(str(st_storage_path / "*" / "list.ini")) + for file in cluster_files: + file_path = Path(file) + cluster_list = IniReader().read(file_path) + for cluster in cluster_list: + cluster_list[cluster]["enabled"] = True + writer.write(cluster_list, file_path) diff --git a/resources/empty_study_880.zip b/resources/empty_study_880.zip new file mode 100644 index 0000000000000000000000000000000000000000..3667408c59c3839d51f785f484464d892f90f46e GIT binary patch literal 64609 zcmce61z6PUw*J7t49HMYLxY4gjC7B53J6L{r!+%K4IR=V-J+nPlz?=13)0;sC5^y; z-20rp&+(pf&%XEnJa>3t;2ECxd)Hd;x4yO3?^9O>Vh{tcP+yxNCXWGsdwHVXaR77x z84HB1tD_T_jf2f2EnEON3~^&ddRN;bnpxYz%@YTJh9PFvX7k$*rrzdTa5w>SvGx$} z5~G$&si&D&Na;(O#nb+tsHJbXG5)^(j$R>9-;9)aDT+ggZyu6R#SAJFN9qoC62Zsc z(HS_eeCij@*ZR6_X!MEGxV0j)xXS5NPf|sIeX{qDxl%R~Np)o~mb=iVv;~UaPmb!p zvHwZ>wr;N#Kpn{@jFrt z{fXI6lz)r#)0ZR;|BOWZ3zD4;!j;R_)AcuQZ1^W;(5Fu3{{j(+vJi~*(;oDnKTxmV z+5WnQ3+j_!*FadfBK`>6&o%x{;KX}NagjU_nc+E2b2PaeV~~=JGdhv1%WZZ6%;{y& zl*f%gI3pzVn=8EjC~57JkAi&h%XIyPcl&`*7n+PkJJzKewJY~}@fI9Y74?@^HIYKG z@Qih0&S9LN++XC5IP0O<{bcy>Jx%_He@ty0%xxU3%p4sMt}Z4v4zB+bxAUnwYopfr z6=P&WI2rbqM(sF|a^{oeuixBddlUjeE9HK3V^lZly) ztJm*T|0D9R+WzTTl3$R2^THqD`d@6IpP;{P_h0?tu^RIxB{UU}xWrc9GGjaJ{4Mu+j=~p%UsrcWK{=en$=WhNd z!2b(9{OYA&ZTm+-{3T~Wf2o!T{}2Sil@sCW=wf1J@w@NoP=(WfTkfFJ4uI?zIEsG- zXJ+SyaJ6vxTdJV>E4aVXg#Ihse>DCtvE}+jb9jHCx>{Sf*qhk>Zh6FiQr)iy75h(5 z{1>d>O2@yH=ehdXA8@jvQoZ8Od|#|Gz|9r0x)jO9)z#OnC`*X(Te>%#-uAnLTIlCu zf2Mo3UrhS1H2&H9$p2&;3LU`l3+SJR@ZbIPk9#WQuR#9l`u_^~kJ0>(5%|fH;un^G z9w#2CQu6mun*SAUzb24BlkRUo|7h#~sv7w7IQd7s|BLkTtBZcI>>pkJzw#6qC3-s( zuRr$o|JXe;s5t&iL4O1J{p~O9nE}Nj6;8I8)qCnCkDU6t7b732aCLJiAERM+y3jj| zJwdWTK1cE%_6BSDMJi~d~r&s|jb4;?V-0|4evN{&{4%UBRp z@B1G!p~5eR<4@N(y1D+*hj#y!3h7j%=AZfqY!nEpn{!3=MStT`_P2KN-!>7y=B915 z-;3x!V15?+7O3v(pSsY0Q-NB<-**V&A&@;44&k2_fahzevZ=VgtZ_g9z#y}3&$v|L zjZIrRnfwZ}F|nI-3jczFhRP*MU)wIl30Z6`S7DM48|?Ms({E@GCYg_!i=p0raWRxK zJWd~iBdfihFMr(5`?#l)8$TUh7iz~KGsRN;Nn6$8D_ooOmG*?1sz6RtL4S$p) zn{uaXv9^D8xNrREUTA=OH-%vdW9*CZjc2dQ-#s>@tg6rfy-!~EbrGA%&0fEdPEGUQ zk?PZ$KS&uDDXqpc@8eKj-b~>?2(o@Oz#8WNJUO;kpz&v8+tcBcOo;O8PtX1?p`Skg z$Iw|}0{}lytUr8?sZF8@KyD>UF0Sgv@?tKLQ?=ZVRcB&hpQ` zPPbfN#*~$!cq21V7L2BuvpbgWq zJy_G77C*X|_pT+v?R?$dO+a^DG~UioutHO4BqbyK9~H9Udf=yKHgp<~(%}iL-tmRJ 
zej%VBLAJ0WlIKw@bN9?Jkm{}9;Sc3%l|acAzc_8VpnJDcSM|pY@{)Bm)w?^|4SAU+ zMfruM?@@ku%@BL*6G!(m$5!Y2F~qTo$FrR})>SbaB-_|w`aN81BPqI+waxjTOB<$2 z+++p1*{e#|H}u-^?+nu~cHy+KPN*lK*S-lkysPv?q9D#voLz9;)8GF&M}$MJ@HA(c zk{1q_m!Lu_bfzSZCCh%p){AiLw3PMM8y)5HRkVk>cGV5EZxi5}L^}Tct~OapHRbE^ zz~B?(_69-mh7tj9dB=NiomM1DYcQ;DO-^Sl-l^DnS!uB{B>N#lrsrjulR$ps|y}v z+OO2GzD`+5{URZN%OWlL{!8meZCMZQUe=x;iT$54Vh4I}TNDm-13K`q&E`C$D`!No z!iHBVU1^+#KYP05`VoHVhk3IN$$ggCTvq3KGtA+J{`Ma82*vq&RpU)HVSqV_LMFGe zl1ILp9eJ)aZkk1np0t?2F!>6uV%~>`<64PsS?CEHB|T17?_$my8f%SYq3K>9x;0v| zgwuVQ6llp@=YpN@SyA=oC0^EBEbbQ*gqhTC$@ADl~OZ1VBpvG<3esJ|@VzpaF z&5Kugd8%KAQ>lzTI4^$`r^>S@pVUyDXw~`X>-zZnV|6|GL46$W3X)qOo*7L5V$ec$ z@az56J5wCpL1y$^k4({q@eD_L6J>Y1pm>>UA$z7$i?6>7e-uwLK$z>$>)z^t`*=$~@XyqD+F2S>ZfohL1Q?!*pt=ESs- zwd`#y(ddU4tQNs5n?EtGMwl64gulmTVDx(1-lm$N$FjDW;9a=PSngY3L;ZYR#r*KW zZo@LctpImPLr$xpS!|i4sQlGWXW0_I=QRRGH1Yc8elmX3Qpp8N&Nu^V?FF{hj478F zpFmH$D)utljVw=uy1Wkj3~Nn*B2~xdS6eZ#_n1G*K=R-EhsODjB%gvgU%fiR|CvZq zU>Z5gKmg!py7==f`n4MQS@QppNKlQIqnnG_-#e}U@p+qcga-IItoav!2v;|AFD@H1 z$Nw~%-0=nLXlZ}HMpxdW*2oXfLy8NS5=WCnQxjmy5^88*U}~sG21X)XOLAFq9)82c znIQ2VpNOVVjw&7ZytW#ct+lJJ_4d$xv3GeRk+qI7xiediSg+?rSkCc=wQ4LDBBG*% z&Z?-bZYQ@MEIA3eyN}LO9&Cg6ZFhG^=li`~goyfuGtLi2=j)@igp})rSHRa22`!5T zkcW$Wo&J=~>-&buM?}}W1^qh`8MUxRr=b=?s!yZIK`!V*)?bN9KDpOWuw}gsP_ndr z67=r{OWH z{0)?%;X$O$6@H@?=gn-^lQR6of(p3(v^}!et^DetjRI3tR#ESQi)Aa`?{rt#ppOr1 z76i(PBGeb~*;pJ(K(`RhgHOS*3vY_xn6w~uNsRtNGDTxLOJ}r#=V)xO-opf z);`lQxaFCXMu(8ND_#Gs4{1-MD!HtY4-P)oYwgH95q*Sd0WOSqQj5rCRf6V5m!U(3 zI)p8)DGkz%Jib*twE9G0RLhn4JWqUd-{Z0DOW0^d`m&08A06jm>PW3kPTzqXOIfkN zxp!$SeZv^$?9^z2L9K)oQOJ+{$RGOebE4xxXNKuHHczUpH=(|VnJ?ri7rdzV>r^!? z?4&uQ&3x?HT^;rHUF>ZV@scv1zqu|91(Wsf7ASrc&zQyfYCe_Iomsu{qvp-V;le+>!yw=Gh(Tq~xm0@uC7VxR#Z2pbTsou&I(mm0S%h5Wd_?yhfkkWJyd(}{Wkp#0K zHq#>Y!dj3LrESzlJL?#Sg*_i{1pB?cMT5J!wkA8uA8Jq9Zi%{Dx)FU6FC}EdDwh%L zd2&$kVfc#rOmy%3{*E5>Y^qisVLzn!*$HMfL3t16_a;@i-&ICa~{W!%q|xD6*< zH&fB06&{tJ$W7Ov)XGcZ!b4lIWxL z{%7*+)ZI21m0zf*H@B?upYCEp5J6Y@ zYmW)DL^IlF&dAZHA%ldel#=-9h#=|-Mj^q89VUquL~#o0S2B_T;}q|#{TWfRw7B(T zF9*t}yCjuuV?o)u>km$He?;e3lX<_*B^TFTpe4Y(jOZqycHZ{59>Fi_4%UYqg+ET5dbT@lgCsO|@D?A8x~Ar`?w?w$}_- z-m5aF?eoL?eAxMV^@(0a3Q}uE>JM7@o4dkqcdcLBQiEhKiZXHe8^UCO&QMAOc z-EY{><{z5kA=;tERj+A8W6QRUuz$dCdt4!n@<;r}j4Ur-N%qI*U%O^teTd?8li)y+ zcQA?Aqm&uUCDNu>G#x&gxz&!3Dv8s*?F*T2re6E$MIp5Nc+kH?LdTOkkgDrbh~1f`Ch2@f-t(uoVUxsHlEMrI4j6H98*_S3$JHLF;wtJaUN z-m|w2y0V&`p=7AgaL7p-y6Vk-*Li9}C)h`Q_(CM2)7YU&&oGYAYcF@FLkeNU@>I^W z_5E2EQ8hcpy2Ept9lN`Hf^F%&frXbOJ8wdn-PM`C-=C?u+nq6kh|_m+X!3gGBJI*Y zMZUJdkwt0xwde~0i_w%SuKbSO$dH~yDW zl-&qNvMkD_ftnQAY+CMqN(FJC(;&Tr&c@r;Z2w)j{glV|b&2O!SY6_;pNm}2>1{lX z=$g^rUJHd@d4!95!MHVxD*ciO9X}Y$EWJLCe`9ShE#jTQBZWV$N4X@qSbaXFg}7lBs)EZDC0Lbp-KQ`z!5u?8#VlSOTB$mWQ*q)!{A;2y9_7x3 zz2TS(nv&3n!=d}V1u3uk0GijnxAtB~`+cxe9fLORed~hmjUag2IYdeI2&=2ON~C(3 z)8huk(yTfg*GXWw2iuk`W_8$+G&#LS%GfcQ?=zVOaHSs?J$^A9zTw?YtT`txm~*81 z!wTVwJCD$LKHFAFHD=~CSe#rXW!qAuq3CM%7BR9dc0H4KkN_{L6rM}t9!c9Iy}izS zG56JQOPq}A_PS;G*M;-IbHR50ytR%>1M6Mt!+zCb&5*SuLhejf>yFdx(U}hTvEX~Lkd48jIV#aQKm5#*WCWfI83L>>9Q^sd79N8M^rf=Sj_il`FXT1O zAvzxL2f!tNoq1_0yluPiF#1k_En1gz(=)&!m929WRyAJPbw~=U0g-+9P)jLJ1s+%C zD`_9$w%!FzdP8t_L+biGT`0!QEf~jyceojS1K=|3>PPiE^XKYzVLJ23{cK@5!g?W_ zIKk#J2Kq@bZ*P`VuS975j;&Xj^gynH`iX9txuEe~(aJ_0_uK18&b9+Nib_Sfd*+To z7LCfiG@NXg>XwtqW%0gjOrk|N@DsPl9-4xu9%6c4k+D`er0?&t-mGc!(pO%kVk!E29!%eyXTCo92X5Hxugs~1mL^`y*} zx0i;Yafbl$jqIhkrh@xzq6H`qu6b{gp=UN-iGj` zS^1isxas`wKDRBhS&Mf@{Rl|lL>CNah}ZN zw_x)yF`Rhc@XtGOjcU}YUsYF<#G{^PQ}Vw(?4gKbP&t0J#f5%QWLm~;l~9kZ8{3`e zTjW#5efZFR&MIZ)6$6)O#OBsr(G~BU2kngB6r7hr?qyc7+R`sXN|QxD)l!@Y 
zpH1$Mwn@n0Y~R;ZIBilU@R1w^(rNeVomhOaknHSrV~R0O5)Voehga^XYvyf>+|J&k z)j}1FZV6yAz1I8Gfa#h#x+mA(<##s@V2xaTSpM?%fxt8W)a7GYbH^?{_paKE)m1dO zdlx3hp=nZP%xbEI_G&N9cO1lQB8_Iv!uo{v3$H6B^G*Sp=^EKNw`1%sqUYj!XSzlL z-EpJXOJ4j6uu0fls<|ZRefO^QyU~evymN>rOC4jk?&`W1?!As0qvHp>EnDWF*|NFt zxw?JM5Zz{80Bf|0Ac>@XCk-+>F|eFe>yD##QBN>0Sk{G2dU@eAi@e)NnNuaX^tRtD z!znUixGcnZf-hFcMpJcP-Ti(aaFDfIUQOYINNh6KkDk#Rt$asaO+&^#?XsP34R_^m ziNd{1T19@`Vk2HiO8(u>r?~*cBVrS|!}L1Or2Fpj5oOZ5nsYuRqa{q|f=#t|&fgtY zcSyPWbxI*}>q1g?&`ZTe7jqx5FkezkmPv~%&lx-^!tfUKFML+-;&!yo7(4cot#)4| z^#0y^rtV#>yO(lG?p-%MFW&3JB6?Xa`?8)M4`?62R^E5A)Sgj%lqeG=n#@gNRcm0Y zY5cs~T&!97Ai#%2?c`g8+ePeFamaW@Qs6n8&9=7EiBG$E*i56?Ior+HQq3T=;4N~; z%?l@vdTT#08~a#e;Z@2|&n$!ftuDIbi;5k}6pJvud((C84~?96)b`#y z-t!@Giry}l0L1L~ytOgOq!t8x$S@z?_2LWnY-I6U_~@3KeN87I{Hw^=n=VX z&pIBvckuBLV+Nf~+5FO1?a8(GGQKoz6^xGb1v}G3efw$pl`{*|sadD3!j;yV2EaNw zo{m#B?WNtNgq0e9{@CX8vT&zK7C6uqKf_!|H=3}& zKiPwqf#*EQ-+q`_(ix=a*QJ>_$Y0p|l_|2vfh*rbr}ks20|(3z_Lg_m%vdr=tW5KO zqaBy1#V3Sua{BgU?ihZxh)tQa!Cgf^C<1J)kVu{7aeuORJx#HH5;?bdx~FX4mQsvf zJxC@%60u4W=X54BP^U!-tpnc346~p@jq4poS zhRb&wb{~|*H~77_SnuRN?8c*_)jPQz_e5ziH~VRu0kUK935$XT`;XeQR6F9Fw_3_k*{$oW6X2bRfOmpm{sVXml*%-G0(ac(NJ^#X-qJKx&OZ!+(S_UoIam$PQ*o;Yq%IIL!BjOWOj*17| z7CnYley@ddd5k3)Gm8^gF9j?o1C+7Ko*%NTc5AEd6!uiP$+#0{MKhi&zv5gOm?R!7 zTQZ{kY9wE|qn~Qw&nW8-J#^~*(lOe8mvTKN{NO#Q7jJQAQ$c`ZvY-)vfq&+_u4rpy z#ZHs5KMPwe23LXD`MVD|D(9Cjx5eFLsx6@Sk5{~siP;CXUWJTb;p(h!;pI=`e6V?{ z&A+_P8NQ<*Y~fE=u*0*mnzX9Qw;&0uCDjji>FttWy$~gnO?lz-a>Eyy(Z6ak9 zXN3y6TMxXbua_!oRZrA^3>(bYnT?>yVM@&Y_`tF4Caa_|$mZ%9&&UJon~Z!|c>(FL zIDgyse6(+0_v@Wso0CP_PJAERZsnXNw!LpBy7Qd4Pg|8#r&C$4Ryv0CFj3~zVJqJ; zpy;xh56?mrrJbk?mtMNc0Phl!w#4jW^!kx`{1d*2RnA*)>)M&@mN(a_$6zI~&T3`U zlj|?}o1->-cHx`n&J$tzg*!Zn&BcMfS|wqS3x_5V%hsKmGk(#bV3-v`@!_15Qp1*< z7kk~@JAHkUzLJm0sDJO~WDr*_iJgyidr-W*=rJHe)YHjIZzO9s>(HL=1 z`Nr#<@G(22%1r9I^ft&hC`B+t&)tMr2)2~XKN6@?p_yFmo@6nZX_vOfgS??W#>xW`_6P~1)(6FrJZi4A)fc_cxImHW(>n^!!> zjSM`%l1(ebUQmq~kU3o!%ieB22VG$rEQ`0=i0aN zfUD0XlJQ5G3*R(?ub!5WNoI~er+ZO#SJK;O)-bECsphKYXijzE>Pv>>Ge@FDzmXXa z8cDp*5A%&e;DeEO(j=w7cVL_mA2qnADa(wfZKkn5@y_>33|13T%U=3AJR^eUN1ge8 zZG3HG#v%|qI~G1}tKc}mSU&z5#w>X64TXK0kUDqK^YSN!r~7IPIWPGOw}p}40AFo` zs_@f|jd?1#lVbxG6JT+D)7cNE1Na(6uB46dLUx))Ys6!Sv%kNvCv0PQNMmMcLvOX!l-44$qhA?WJ+du4mWt-|rrBWqx7eSfxA%e6D2;NeK z4Ly2vLnFj9zMOLQX}u@~N2JqCpm<&`5eu}Y*b0~kAiQO2oNwQMV4j2DCkfeEl`7lhk(5oj|CqXhtDLeAr@mP2=7FE zknG#%gJ?YZAc^NhwLm?a{GId&?<$nh1bKpa8`iYl-$DvB=azIVW|;N_oOJA)p;hl7 z#h^o$v1q9c$S;j55RN1u(8kT15Z{syUZBkOLvn)TEP~{PC&}=4^R0 zGKCw;EY6%58UXQv@q}Q4wy5!WrUP#IiREw`iRH*ZsB&x>j{LL&Y8ECRMXUSe%jT9! 
z@RH^)=GZd~&;lk2(|J&E*kbNgWMbP(i0T$6wG5rt|FIH@DbhPjQ;=<{AX*!_6O`MGB zJkS#j!nl7iMgT!_@0-S=z2QeYsy%anVp#>C`H2-W#8DD_0zQLOqoG0iikH9jv^4Fu z_Z=V!Ko4yo3ROVYZ{Z&)v-D@7{rdv9OoD*+ZZTtt*+K=cF7FvPNeh5BuEnLG`;V!B zpRSWJyIYV>4#*?>i}l_mIqWwa}qt2`Y2lR zdg5ViJqOj1$6DKc2V`u3pO_IH5Eyt)kJ=UOG(h2bO5klXsW(637)9xDfO7YEjP=3- z+RJ!yK2zs-l{V1|*S~{J)eY5*sTXrK(nSI(%^nc#;Li>0zJh`_JdNY&KyqxmI4dnp ze5(^^&AtW4E$})HNQy!m{ACJvoC&z(deRv1bercR@zg6tX-E*`qOmJ@AQFmC2-$x| zc(V+}Dr#*~3W4Pe?42r~`jNaN!D36#JnVJXy~WkBB1?UR`s9O%RwEKpKO6I{ew)Jdxi zZwLyQ>#yc_FBHpN)Ua12A)x^48N_TQI%F{9cu0T})dfG|gEr&M|0`s(rZh1{9ST%Ar@s&3SqWn_g)52C-NxZDK_2<&nuKu`Rov{1J)BPM zxKG6UfZYx}C&z1o9DkpM3rdh1JO0EB-G3BHeRdbtROY}DB1%Vr|AP`r5DiY@0T^tv z%i>v2VJ-7efPFKBbDL<5+W^Xvq(}slA`FF zYyq|yKsvw*v~6}pVLVF_H?P0+@)5Ub1X@6vNrl1(I2VK?IVwQ2&R5%NT&OPB8426h z3zxhX9w6CLX8HNZ8xaoO>S*WvT3SEe7O_AI)F3=~<`)2xY z>5RzmD85R0HW~6tYQGZ%9$5xmXAGH1Ov>YY$jEzM8th!!M6gXd@MQHI0K+4E?uldq z$smGnO`=JFcG_TZMpacbAX;#sdkziN9T>z1e2q0vW_(o~n{(9YUIWTXe(olp$qEpm z-fX*Xs4Co5OMdK_z1H%KjOu86p9boLYk&j1rkN)lFj+ONEeN?~wPiLd-RL${m2k#@ z-T)T>r67VMO^{RmO3O$>;4K$$gCOWuDfI*angpEMB6>Yf*!OE!IbO{hw}Jfa3&?%D zrg{1&)hz8~rpnSqW}5f5`sr`)W9C7H3OOJ}w>yz^c!~YA@nB$)u#C)&G0y10lngFr z?OOuhAV0WT&uHzZ`-7XSUG#pSXWz*}C{3@@;x&x$;#oY9FR2&WgLYsGO`U+9*<9xw zExdTDI+seSyyL}0ZX~ZOYGS=8d3uw$``RS1l2GI7V9Gp+zVcu#Sd)tqc-R))(*`G^ zhhQxsJK%xnP=Zy0VRtw=RbC6Z0oAFq^UVEoUyH5m7_ha!R434U!D@CSezI}MIh0YX z4R@@?_LgRmCQe`&o0qUm>Y*t-kXwg|20tVBa_!cRQSkj`xV2*dx>%VC8f0Y%T6i}M z%dV-1)qwbbclC1n^$lr@1kJVh8>R{YtaC+N*`u|*YT*8bX*j4dEX|xTW{B~|XmlOk zhc%ucG#Y;)Cc+$mb{3GuJxO%L>RX_D_zhmq(^O1W+SH^p1ya8peQg3barq)duymU( zK)Q_v_|6onL-sCoJb)lwn)Z>BN90zPAurMoUJ&p5nWbBvh%-As$XDc%^we!D_*n2X z(S8ljFxEFcEZ?Z;Ex1KsL@r^v3I$t9n)% zEfuU%0A%6C%RZRg=Jhjc)Po-vYGc#Sz@uU=ayo^V2(K ziA06Z>o6O@J_1JpbIGR69rS@ZPX6yO$Ji}Sd-GKHz1@Z|>?A{{G{)atZog{P9nC_o zhBYxW6E`7?JmCbuK%s>_?zeZt6f}$g(5_5)rXX`MZO$=mc=3}J%ySv)BBLiowuK}t zC|oMS3{9-->@0A@cn|9ZF<$@G+Zkb!jBYkrQS&CCNX|bO}R(L35>lwCyzLVF2Y_ zD0)`nBRYUf73kzpmbhrNjd6u#xI;ARkpn^yBdIP)0n z_02;j=kO-l{OGUu8PdYnv6=*$&Mhw=VjN1l9LmS6Hdia)D$E~sNu22x1nS5)L|Sv+!OAG}`w4oneXQQysxptnJT!RCmI0 zAAGcqy4OLOry};?qg`JYYf=PSs9t~!g!rBr;2CmQsIZtCb!Y;^!UqWZx7z*ev2+4- zcpUSX=Rqu}$Dv=NsyKO?CFEZY%MJVG@%3PAPnFGy-7CSV(7EYlCVzEY-nZXqPx2k} zxD9Yrm+zcIWms*8H++G`rML06X9}Zr5FdESb(|p#^8sB?- z8>|iEMd@CFRo&8rm5xW4yiBHO z0eF$E(TWZm-cI2Q=aLB{P$M&GS8m5yoqM1RbK|DeBe4SKrc=!M2x*LK!4}N29?_D$i9+BxWn(o?le6_?9_fbua;^43^3X4`-IQ<kB^9(?^{HXPwKOM zz#l$cALm;^-dkI<*Gfx>s*LXAg~8R_Cb7yq`j-m^k;Rx_b zCvt_RqNFOHP9}5zbACIft5_Cgq2kl_8)i_BA>>Ey&~D4NTo-FAd4|QXrB2mYurz!f zGxxEpk1j*exSURb0HYe#d2WqKuMtk*?#pHdqF(&cW_Sb($l%;6WJt(37VSjbS^#xl zp?%IE6}8z()CDmhiHh6GEENt0H-=hqWJn#JeX!-FZ2g;-Wm0}dtA58!#{e@ZLJR5; zIi@0bpFJpgxJx zA?XBU(4$HLjqC6>2^n7geLW4J>h`-k6)_m@fU$6O(a_JxYHpJ&@1d?OSxjM5=twQ% z{2HEc6DW>DFZq_JAvK%bV!ICW^FUUN+s($%n;; zT17l`Ua=agp-es5mR(O(89sZ+eUT6zK1JQ^n7ct2etL#`hI*g_WIBiDe6ePCVs|M8`e3AJcxM-x8{=wolw&pIvwtpxLRdB9Eo%V$Q54FG(5>3lrD z6Ov{m)d7-N6p$gc0BOe^OL~ga(3u01bwZmF*?B?w%!GWswc6&y`kYvk&m)H`ZB`A; zNo^K|nz@=}3Xm0&DV73FVY%!Kpan4|3UJS^YPE9`^R3MugDRp=m`!&vXx`3Tb*(9P zAd4hqMulhAhzt0>qF;J55Pvm#E|zY0DtmZZdGG_UF?PswIGS<; z7n{)9r%n+E`VqX6WXuq*ybE{B;w&`cW*ffF&x7Xp zqKRE10|M0k3r2O<74Kj%2lmcBR$ z8Nw9cGH(sV4P_zCI!PPdMAArVMqz=RW#$Qe^vhcl(`e@~c=n*-3x@!K_SB_y?dOfz z@tfY=WjL<>!I4m$K%k9S8POe|FC(75o|e%-k;Y2wm;1*S==7(K1KHIKX0v29U8V{8I8HwFx3ReaMJwjHpNT65-R2Q6yw+_t{=Oef0`rselNg6xa4yzGfBubV`C7kD`7->eN zi0RR8k0kAfo9}VPWve}VRU|x>Hl)CY7i!WNC*DjvJGrAL39(zv6l6yN6{+rcUBov7 z^h=m!l-bs?48yEGI)!pgn(5Heq%uO_Q~)2e1p?Tt=Eb>@*h75(P$rQi<*(tnhE9C3 zJtV~`rX0k}P5f^gKswo#hu?d2UBa!ehj#p?Wad>$yMs@jdd9rb8RY8Q4!p$*owySi 
zU_W|ZE+vFA(rch1ILoy+kWcq3X*08(zHw8aG~->+7kW~@{GD2bC@E85%Ct1jAf_8b zA+GLoXE0K`L9iO_faw!;z#!@pi{e)-RNYX<(-b3(vAor>cJ@6j4hk3}E-|E9gkNRN z=u&@Et!RDbW>E<=g<3OB;t@bTF|9j{2*r&)SDHg`vadaT1@XsC^}fBK*Hw z%u(rlyTg)^#z43oS&YzVyyc7iSe5^dcWgI}G^=lTjv>}{Kn$X*;ne-z23(IJQth4j zZj&t9Zc{}9s^K#r93gmpnVvHW>+p;o1{kg9yFzBjTJAsb|8kLGhDnvTqCrt496nO> zF8&%d`eHilncSBL0GmOkwvIJ|vS)*7XXS-c&bZG}Clrm&#_PK)+5mkE1gGMM4evvF zuK4dO>B|*aykoC%YH-cU*}HMrDS@XRpO%-A$JI&q1_VOV=vS~NcU6(6_qjzj#B)&t zFBfUsR)}JZEHRKrehG`Zg%wCkH#ir_KcX5khPRN-+d-c21DOX!H8s_q<`2g(Azr4N zP-}+Z0I-zM!Z~^b)YxNye*tjupsR-(t79i_kTdPA+DS57Aes&t>H#Rdb0f<*mED6W z9bZ?*^es*9Ol__{+5lfnQ8Ipl9>E|3LLoV!ipU>4Ce=Wvp?2H+d6yp}zoEEiu- z`$*D9%T{(1PKM7x{Yq^s95wt6)Pz(o5!XE?{^4`B6R6JXYVw2#Rh}faGiU;UdG2sh z5Y4RxI1Cbwh|D!Cnb3bh2-fna=bEbT!s_2F;9Bw6oN)N6<@?1y95)PW420@I(q(%kv_RDP&PI#vb<%N<;Q+TbU9% zT;NcU@wh31?``=$jav#s-;+$I$lMzh-%En#3xX4J)vY#?t#nXnS94znBkj?ASb)0} z)RPejlpPSU66u9F#qk@??2d?}Noo;of5pN#LyfJnbhynu@ts-$z@c)T+hJH|(yR?N zQnn&P_$x62ov9{Qdc3DhS~+ASqd2+lJFHujEq$dD)GwRvzRZ?)*93uf%l0w(Ks4V7 zn*svYXP>)-1HkMB8$4sAP52zpj}gaDRmL#9Nz)f}q?9@KaQvkQO((n|&PWzejvpk$ zG+Y>SJz)FiP3cmD?PrHQAqW|D-ca$vxjXJ)m&PVaF?McHr0%}_gfX0h#<&E?!L$GQ z9S}9Lax+`x#jRKo86nuh4B>SfhH4*NphpLZV}M5umoq*4c32w*f={_l#^}9q@1F)5 zv;?Ilq3$<-9Quweg1*b*J66B1Yk8ZeX(Br@3@tzm)&{tnG@WZ)B{C2HW~1Ib-x)d@ zH+hto;O`h96sz*tFdqPN|ISlVfQM?|jU~n(NAmSX(GUuMs#_<#Q!(K8Xf{Xeq7Xjgq0P zB@msjq*>S7RpFCymTnu5IeLcXMAt^&V&?yj+>keN9JJxLfedM#e9XuVokPv-1~+?* z86Jv0KC;tHt!F(*1K7391c*pMF)mA(M>CvyCMVG_J;L^)fH>q4y%} zaQQx(rup)VLY&Mece+qB6~GZ^O=wQFaU5me{y3R8^If-x4BoN1?sT!#Ld&!-JmJ#W(fDu*Fa z>yw>bL+j4bgp1HE=4Fpgmw_efpj;axE75&4|phsS6i0Fd<=>seV>>Su>Y7i_L!z^tJSWZT z26T&4`n+=n0$@`ky##R?HXhX{pGe_FJYQN!cqg#uXI;0_3`m!di6hh5Dqzsk&L4|O zBgQk&u*E)>>LTeH@o;OkL#$mfDYXFl>CyepLnm>A?a<#ketO(qTF5X~{M{c9#CV3z zOvU0dR*mWg2ymo64jJX(0j_C2uc)&Vi}gmmmFLW``6Tfr@d@oN>8M2W5*BZfXr!o4 zqbIx$Ki=3#Djc)<1X!TMmkqd7cX(Vgx*BG_%Sq7L^n@7_z7|1lCUp*@3kPo27am#c z2q*gt@Mk=^=UN50Om!8$kT!cn=3XaDrz1t~+6&0D3jprPy|NIw&* zqmQ^!v^bDwp_*ZF3~;~8FDH@5uPHxqI)=4fYEgg2a2x~HF@;0GN*#bAoW-4lGHbIk ztKbH0b4e^ivX(UXSZb&|q9|g}%G-f8s{jX4|ncEO=#n2Pa?=jN9V-6(C^fRg!*zTqR404I$9r;Jucwu^8F-SF zZ8!qKbX$r1z(58OgnI~7t!x|JDbS&aRRQ_SkV6St(Tw4`SMnI2gG=OQsH~I*D(Icj z=LJ(nk6$M`zggV$nNX%15DmBr)kPyVdXm@)-_zem1YFsIJZTCE1?l-2+@Z#uOm+2> zm@fxySp+1Fb6N7x#*A#+&lqk9Kp4(+9%>>s8KxjS!qFIdZkc)Z7J90Ws+l${*g*6j zgd6h7D+~<<=E`*>H9FR@l}tLh;d6=ogvYe6JR8t=u#f1c(xse1TqOKV=kyB+28Q!d zqp^#Eul-pwOyg@{Mc58zmyKQ1@GePp@xVvwZgNCLgJV7Gu{$O$NS_O`)@F|qU?6Ik z2c%;TzCic=$Tf;tc~?h$oxEp56yqx9Bvk;B$bWSRX{Up!BS?d_*`g)UudE_l`h&A72&KIHwp?Tiv^9UwhlA<3kXH&SCjoQc znR|Q#zH6I-x8p0CidVHB?B7zRA)mMjTKzirJQgr2!Rl(nGi2cL4*ai7 zSK$xt?aL1wTnxg+O3V}KCtL9bnbYehkmM6qb%=OQk%}7DO>y)~6?!X(@SQI3{xNqP zZ6Fko^}Ss}PUbW)TX`qD7^Wxs3hl%Qcc|gDY8UTK+`@et$mrMT?f3pLs;=p<@LVwN zG=kH_sCk!Bx$G`7hGfAflc9D3N zlX*MX%$|mBOJwGYXm(g`Tu~H}hePe#w>7mw$1(X+@t|h?5!ZWmHW6+Uw_?YB95|6p z{67GDK!m^kRw#Oao>+e!zykEQfI;Ed0Ar{X)^mHgIYi3BfFKU!!2nYqBB9=tEY7oo z{7_|#ImySvGYVYb18LOxrcC@1nmZ!ej{y1^)MVHYUm&Xo4c&*Rc2yI!VYU?t64;v} zH8w)UP;m0Qsi_Yw+h2{@8KpEpr(V9>zjFCoY$09rYoI7Cq<%DAe2aF-E#Qs2F zcZS$vEDSTi&pf63e<~i3hy%eMwQ^v9F&`S z5atj;&w+#y;%mo>7%Sof*qw7t)eCQLYC-rUOuc7do#`V5A2ffK^w1ROS3>tgr28rX z*8=jWvTz3S>`!Zoo*n2wFu=?o=}=cQR5AtxaUi<~jf9wxUo(^g0|K04=vBWi%mm*% z6>>kJ4<+;?WU3p(x*&g=saRbnE5I0CC~BfmAEE`r_4qcL?pX@FX}ah7aGnid%ZH(H z!lv&sJz)NP954J)(@`Ra=zI7ACLDZs4DkC7i0>_c+(6+>GqT=-dk*UY=u0WeA%Yyi z&V|?jTyl^0ky(-KAX*F`gWXrV>khWPtJAIxTGTS8-$GtrF1 z2H*!^PP$)_C$CQ~jSsduXBxu^Z@~)Vpc6BX>^oyX2nQl(o671#B#l49be5T<{v-6c zmbKUhz&Qp`FNFKMake?>O)6M7hBy%R6+!QMem??o4!bJ;{KjPeF{@rw0_R!+7Jw^o 
zy_8xH!dd|SzEMWy2q7nMv(lC1h^jLjP)Srn9BZ1PxFPx|tcMn{8({lK(lJC6YK=JD zp>BZBkcJbm2V3Ubp%(``{VNy{#DS!9Ex#uQg!!a4*dN&9xvi*p-jU*zoD<0nK)q-E zv7NWV&$tE34m3yUgd?==ENr`${kQ94%qah zlf@&G7XybSoMi?*Yfs@=ATP+6#0>EKLvLcJ8^iD8*f&h>BE07mZ1cfPJKnw`Mb9>s zr$?;-e?%IGh?gIFil6mk%bKIqF=l9XKV2Mi=%W=lCb&}4>R-ozC=Qf^0U7ZCn{zFf z)zu1iE^3HE-8D&H@_ngP-57cfr1%;N`3A5F3x?~XJ;^rw^GhW80=aqREcO;i;t&aP z3Bm5e+VFAJ4`dw(@xasl9sf07z=Q*ZF(Bn^Q+fIjNjlFCa>lR*fOAQvch^9xrkU|` z#e)0WXe|38HR zhJNLgO=yPZ4$u+f1B%)M;E6#_NLd(=6c5PFBdhv;^`d&?fK@!e!Va*G2jsT{bdCtl zG)(t5661OZ@Q4334E%%q5haap z{CzC(=C%7%M18^c#egIp85<1Hx#DZ3Ey$jt3s5&GqH2e~mOG2Z1EM`>ZYY8QHhj|m zJ`BKkLE~V-c%x+p)7kS!WcDeR5D&1%z#1tS5UUwF@#=+CJji&?wVdZ#%8LP+9HM_; z*No#uO=onWr0qEx2R=P@eGb7MGl>|GVh2Qe(kb^WGkJPbDya{VoIPm%Ee??~#_OY9 zQ0bE9t7#lGvr%ujE6L(nb=vpFfCM|h1_Ki6MMC|^{$UI}{4d4;jQ1zp&{S{h(KHTJ zR5+ZBsV19_WjBzMKT?lgRATV}d;W-|{-pKlKcZkyx_^s9RNwJObtm+ws^d#X8!amu z2k_(FLwDrqHZh?T4w2IO5GkoI;eQJTV7xu*hPG$9lwcFU5yXMHk(%Dl zB&+*}8P2t2!yi%H4#>iQ6Ek-}P7aZxeTXE-1OFb6Oc~?jNjKy-(88S~f&=)OG}vI~ znU#&c9|q*;LnNtJwM-0@v&Vom4iUc>l@j|9{ilrAN4lU>#cd91YpSX+;lRvLO>a+1 zgIh;)7<$z5Frd73fr9aXk~ljZl{XysR zZ4}@TDTV6ZatGuYe;w+K9_;FHyM?`uu`C?GkEwy0?zY9&wWn7q*rS#K z19J5tQZk1~k{$TJgaODMVt?@7v$t6nQGH9o0sORf)U>V6XmtCuy!}a~ooy-)2E^=u zf*hjFO5zax7yAOhz}sV8z6K1n^kUMrG#tQB8%Is2>dZ#B&aI~VlgiH_l7s=#^X|U~ z24wNd|C>2P@?*eiv?M*t!U4fTQ%$>_$&HR*T4yH{1MJVYl~OCHCkFm6@keA0SRej% zxXYgthFT4greiiZfFBEEHNA}Kmdmde*^@m*f;B^#y(#5B*HR=V{2zx%?(w|3Gx}8f zBf41H?u7dfhOK4kmn{z92Wk-GJPd~&%(lIDt=IvbU0v6oR3SUS=4?|IXBx`IEC2t9 zL&U8APUzinH}vPh&abl~o5YzJscW&(tvooOe=JS7`k`JX5eIW@Z{OJHh%Og3q4pli z!5^tF2EIR!?7t@-fV|L4ydH!+F}4G*cVxLN@7l(oxjZ^m1P6k6@b@tC$)96gacsH6 zlgp$>DdYs`m{1-JNQeji$03rcd+ht+?^kCxv^|s-?hxL_N!LyhUDgK&fT+tuY#{>Qm_3b_?im!T8%KQ$ZK&zuf>wrgyClkuYD7K z%@AJu%JAAp;ny_bwZDX4Q)75d1zx)+`!!AtZ^(Mh9gn#Gz4pJ?{`cDdUi;r`|9kC! zul+~7R>R`88VRr6ll>Zr8JOaYkFsAQu?ka6BNF?Nct|8h>P}vZ6MBt`rG&pALu{tT z5W^W#uL*r%EblepA9yWk$e_OZI@UV)pZfj#^%{n60|$nd2L9zcW8*{)$Bxs#SC0|1 zSN*Yn&i1Cq=h(h4d-mwgm-m71)!VC`KGfh3E4}xLBm2d#>*rB4aOe_;AGeP0GHB4M z>s@M-CLDL$Y2MV$&27-!r4i##dRes0?RjCl<1miO__11XR;ulPp1N(^`@dhjJP zVP*B6ozu>R1WyX@JMYrch?K-Kl@wJwj=Pp|tB1&_sjaPU&;*}P^ZR+7b-H+{B}etu ztMThsl&|Xatn_G3VX(PRZ||p0S(Pp)vmY-G9K56Ts&3czgg(1|v82h%4r$eq{`YV1 zx)Yr5HskV|HRDcw`fKl>Rl1w1zeTmUk+#22@U<7W&R^gE^3TspZ#E9^H0^HC;PTrc z9W=ZbjI!XwwcT8E&YH}~L{s)#>QDs9_q_uKVS|C&eb;Dk|ef$?#1TiWQ2KJ)Z! zj_&bMK})MUd^qdU`po`!hl0-(cJJDF*01yC{n=qK=g)|}9S!b$`zy^5|0gea^sKuB zG#B~_1#`ks;APm)VH_}Ij+7j zar327Q}VlheSEs%xtN*Hil_g5^p@cmw_nqa`(|<7r|9e5dH>gjzP4YUwZ1y**84f5 zY*+cmYtFWbOT7J~?psGY+ZC&dkA&W{c8GtydxW2Nzk4eNB*eM3T0C0w`Sz8%FJHz! 
zzPJlLx$776zTw^XK^wk?OppG>m{ZcB&DR>sm>F-|qZ_d=L+*Ss8p*jjIcU{^YQH}o zu4xo>GNVJ6#j;=sar-!6X}F!MkhtE-E~Gw0%YC zuBv8Qan}tN+I}u^>eKzr%<9KucD+$~99gr^pk1G)6SlS-Zs=&YYSTm4;tA@<&GJtr zl-*qB-e=aFh%?!9di%PReCRb*(<^|JpB!YE|K(Wq-($~xdxc&dA6Mq%7W#3Esrzs1 zzsyRF{bc*~)aMQFzy0uh`Y)U zrHM+9o7LkS?S6B<7?H@W%;^;zVKn7S-s-Peu@MjMe!VpIrEj-glQK8;DfD0e&r^kHZ2U>+#h+ieVe$p{k>{Ew%*(NApc0$0k*Z- zbE8(4hXe+e-M{$UIk|n|)XBac-I86d{NmXu<;6Df9mUn%cd;V4a$ECHBZ{I;+C*+KzrB6V{y(;6aRQc9s_#xu zD|6F6QMFE0`$$zqQQ*LogKJhEd)Q>`=P%LUK2(35IPLF<-)`S2by3~yT`|4<@0`0| zvf}5h`PBMwqEGX`Z%=i}ez^Rvz5#Jbu6EWrtGUara^Bbs*gB;5FZq@0#!MOP?&$hU za>;|z^0HNVfy*6SOmYW#H?OL#n%v)gxaKPEl4tI&$IVSf{`v6SIsbc~e_C_%@0)jL zZ{7K=YRBzch3!{62Wm8TE~49lmUPHuBK)E*d4ifp#hntqbmK zc{pG|7H7b@yr$)@t=@gwpB^_Opjl2;A8)l*BaWK)|NTMcmNsiGw`JxwG3K7>qUT%v zd*$uwzf-4Qx$3uTA@Yssv))AI{IT@?pR51&j(sw9-~FV`J2>vYyAGVJuJ6b>IQxp1 z){JiEZ8O|Ir_Vh;zROlOzh9O%omAAhm-p^=`!|+47Cb9>y2CN;sgw4`-9@_Z-fhnO zyllM5_5G0_Ue5h()02l)+IQOI4Jg)*>)fi@x3lQ$-m{PW#%kw1oP1(@&;|Ud+Ya1Z z_0si**G3)fC#f9^t}&`v*lOk& zAJ^@tlk7t^H;4Us+V<;!W&c2V`~qe){wI^ES9oy<)+siD<8r~KAc z|G28W5hhPV^3`igdn~Bzy0Wk=eX#B5qg$%3?FjO1=C=bI%V@{(rrH~;DvJ&{xpkg@ zxEifQA78&jZ|=6p+_8jvtL5HZzkIz|{naJmzTFQmpQ1BP@nc3^E8YF@cfVxE^R~WA zDqTu8C4I6fzw_Zl;Q*&@b1UckaXYJ-Pk>(BWzC9L)o;$lr0q`ibp0zwz4@WpBTj5s zt@2h_}|dcS~Mbv36&l~;{tQQ5}r&#o_Ydt%tC_qCkM*N)6JJo~Zq#b~tm z?&rJiuWxsa)EaU#;`jGk*8H7=YTO1?cK%b76EMW%#D*I&vsRX+_05~EwzQFtX=TjQ zUp0Lb565?VkXaTQ*g3R=$s0GTTXRm`d4J=#)~y?N3-xs>EY^5%xy|ch^lX>=lOY!$ zxU{Su_|{`^!LaAfjj}@$-wvPb<5Muq<;dm6gGzg?@05P2s&>~Ok;ymIDp$Fk|1I;) zlz9&yT)gTXH73Z`<)`>Zzm)G!I1~H9JNEeUk%jYu+;6H`?R%-ara{yn!AN)XsEyaF zB5#-)y?zxIGT1$)W~+K})a~)*u^9WCW*GL3nKffaiBrLYQBSq!mM5vJ-0INqn#KTK z%g67lKdz7Jc1g`HB-NnX=k{pTj>3u)OGbSdIM990+TN3NQV#B%pW7$XYZMysv&L|j z5iObx+2drlYuTLLGom~1!Joc1O?zWR`hqe%N#}l{CqI=&q(9Z}cXa1P(~(QfT+asF z&O>+5lhg}AEeDmyA;5=?bq{aHSTYFvYrJB>oW!}a+K33T)Tb#W(*jK%(bXRNR zypzXsu9&yUh|{>HgYQU-k)vup&g=f(aMj6(!w+7Xs9c_P_g&onR|~)RIEDNj9hqC2 zaG`A5lhh?Wx|L+)jCh*J-C9+9c9==tUo~@mqx=TkA78tzQdRrO)XCUm4jO-8(XT6h zS$Xa6%dP)z`RYS9N9$vyLZZxN8A7GvUS*j(I!J)m$;s)T@l(JN1s#jd!O`!hhSc1#a{c{uV|byMz;>ZjjYpZ~R{dCvoPs$b9E zzVX596z7Z_t3F$cD_5yU71lnhoeWEB1$N#*SvPd%v3edY12ZQD4bp2uuIA3Aredzfm0*^ZTeqRAG2 z-CJ{#lb6O#=$f@Uq)koZRlP6lxbCxk^WK`KlgdnTiw5WIPI2_V+QFsl*{?3Gj&?H} zb|SY-<(kJcXZ06NQ(nw_qWk{*M%`Xq|4lcm&feMn=Fa&y4qHkc?W{bz_m6wj>q7ju z&dC|v5q^6ku@mYsx?Yfk ztk7#SZD70I?cQ8AAFq04)V0^xX0@m}dU|)&RnOE2t8n}&rCT0m#)a-*=hEHRK7P!w z>yrZ>F4gS*%SLsNMtLKj@4T%(_RnuYDb6Lf`q~ez`~ywWoxAK$_QH7A2xxiDJn_Po zw~qejJ(ITP&2VjYyXM}=3)AQ94nM5YwDr?biJObQWIM&bjwm>UKRd?J;^W4O8{ym9 z?+Hm6Rn{@2sIYA3HoG=I4tU!a14V7Z1HaSePA655Iu7}vQF>R;w$AyLhhC&sq|Z-2 zx%=bOMt*?_kNR^?T<|=quHUNAX|hdOyKzmsZ!RxS;;!pEVOswRwcG(CzYaONC$rYk zg1h;+cZJQb^S%t4GW||OlM?-b?Q;D%-0Y?*tIu_C{?un~N=eCx{B7CE+7F+v==Xe1 z-(KTJB^VAE^E2lAJ4C9Wt8yO=mANa|mef*T9Pu-r4o^{7R>)@*w18=>^Q?b+2<0jbuS`u&D#^U7-qkR8+ zEk{PB-%4mZu5ihdLvLrf-u@cjwoO=a+o2s_EgNa!pHRGVlvQ|6c=Bss3%jJsw%X}t zd2tKkwYPrS&Gj7SQ&Gt&xnkt%>VN-*+Q`l`xcR5NcfT>!iJq;MdS+$Y8NY@6PVpG(~TkV_EE%<%hKnzxe!O z&zC(pkx^?y`dY2-@S=Qp@12HK?}IH@_w(-AV)3+q_b(ztKkc!+=T~)P@F*{5+Zl_T zc3Afb+0!66=8v$>g@yh$trwKl{@L>r4mLI&`m^Vrs@(J|_v}2zqy;xENg8S4mUqs! zNm=aklF4x?D(Yt)9tQsW$=E60*Y`qFOgg7&L2hdD;Ga^T=8aKt)w$krhG~F%nSSKo zqo#h|v!=(qzdr|OaSJx7ecD`A>6cYgYY}k8q&Tm6+|U=ZM}BVdyKdlsrEPN>_P(*I z?Zm(vZ(Dq7t74wFu8dQZS6~+5>l^jH%fRZtOzyc&{9SYR{f^I``0h&>9{bkz-kH;? 
zJ97>e+Xi_bRWJ7SndO%C=%h!}R(W#_tE>Y(8$B58(l&ld3%%!Ndc7hnc0BlKds1hZ z&xF9WB^y(=Jsi>K-1{|L9sM@m-cJVxniNzfA3fZ+&ASJu-1mD`c+T*f{d?ex3q^}4@ZR;MSQ%7jr=w~^8^hmd8g8@#1RhKTPG_jBSecYe-?Eh-L zPwQ9jgT)c9Hm#2OL^;MgS553+<2t(dfLE@;bJGrPx_XXwC!OD%9dS=DCB`Xt?FyQ6083_3_C|S~sWt^;fR@Lj2%=THW7$ z)TXuRaqADvh`4gy%ggI>hv3mi8z!IV?3KAH;g=qGVN=n$e^qvXx1-8$v(1uvOiXd! zGosnXTE~JXQ^IpoGEVwTpQ7`VlgItmu684*%pcm! z)r1(kuj!3X1lD|BRK_(Pke2H*Fr#y&p?c_y)tb#r+1?Ek=-&pCTS zt2|y~q}9ad?v9-+w!K|^%FNX0tV?g}eCHcwDItYB(lV2C=dzRUNiLdsy<2S4p4ZS|W7T6TJJUe(+qL&pIUYTNTg^}G?HluK zvhT-J(+@VVeYu)*$Y_E;r^&86dXF`A42n-TxgF^1YMFQ7z}twLZA+ZYGu@ZA)czb8 ze&zmr=apK^1623yj!U^6+cPQd1E=T^XNk>^eJ#55Q7>MX`Nc!?nazv^=PuUj zoJ+2?3mCD^wP`_!evh)$Q|G#+nO1*^9slaDq2-^x{=WHs@Yuu-ZQ?$z)Xf>TH}%!t z4{Hy^*z~-6?uql2G>g1G-ih4sV7smHmsfpuwl>)OXL!qBRJ-gpncSq++Os)dotoaz zyO3-;^t?xxhe`JH)U#C{L~ru#SbO|k)02HV#XXLA@K&$rTGFLUM&=%Y_D^ch8RwVx zzxJ?;-qYtLrR45u+q%m1nuUVdoEgs$Kv`5v% z7p*RAAG~|Q?cL6$uh5x69XZo_w>|c-)Bf#_oT8=q0qM<5PIPcC&g9;H+oyQMsB?~X z4@^Az*{eLb&aodk`@=kJ9Ua^@3<~(`{pc;vULC(TaFD|v+l{vx>Asv7cdyoLNLfyq z`CZMqwmGe2VNXVt~i7JP~+%5Cf&ar|PJN$)f{$oZ(|NFs<=U44pC5U*dGjpTzx}pJx8|C9b$`|1&a+)VF8*cp^^T*0 z&aI7jobt*1UYmrU+NETS4(T!PH;o^5`f5h|bJqTDZ!>DH zacbQ$Zi?T~{B7<}9!&~6y5VEDn2Jjq!XE8Cw|3s!&O>Il{kG@a_^)@ORXk>w*`72} zHS*Z<@y?g8ew^VS?!IX=>sq(+sJF%m6&(-N^gSQ)rf=Q_&yr2wI$JDx&Ix;2;h>%E zSU%zxPwyk$THbfCP>-gvIgXyzV`hJj$G8c4MmpM# z-D`7v=W{dP-2St@mPKHPmJ|DQ{PWSnA1%VB+D%wC=ac81XRTdpMs-N?TobLj;KxV| zlpYT}tG1{a>mIylVv*M9{>_+2#p&?|`r|^c4dNyz`yUEE?51}5l6jKr*1*ccIp3Bw zjels75sYUj_Kc)4N&E93)_9*M*TX~ z?C8>#Cwsnh^{UXA{!FK`>)(E^Hr-Sc>~BpQz2&h}H^azx&nA`I2Cu%8Hp;?c@T!E! zryf-+(^`FMY`u3@jm;WPa`o8}{&8-;6N5^wn;I=1S>YG(eD#GLcFl%la@)=6;XVF_ zUG{?qx~=cux^!;c%x|ie7UNsZ?6Elc-2S-F3-w2$fHeo7eR-*(tGd}P`{W;{{Zucw zax}*}U)MO;FVj}&=f;)1?7F~zmIbFx=x?bb-~BeYX3Ayb zouhqx+wK`U|MC69?)TJow)i}_&Gw*9?pFp4jC4#tY59&jqtSrmvI2v*RnMXVKIway z`;8mzAD{MH?8l&GQI9&*%=%b4pi4=_x2{XkUZksgeCfF7*AD26zc3g(xr~z6O9S-^ zwC5~$9#LpA?dm|)=9A-o%~f{`inp+|>hym0@A*&v9}Q<6*Yp?l@ofwkA&k*T344Q1?ev7u4ljJc|HH^@B2OXoX>fG?!9M}dTo0a zi;BiU7Yn|<-3(X|iJ2p`h#4c9L1jpGlo${0gCAh(k_rb?4jWg$YsZ+L%Ua9zePjX3 z1?qq1H~_)WTZng0mDQsWM^7u_0dQJkAmoRRSl@{SUUQ_aqyeG8`NKVTj=uLJDswX7 z-1ZBjhST}Y7J$yfi8uj8kRP`+O+GWK2ND|O z`j4Dk63Rj0dxu68mykOE0wTarOGFV#-+g+qfY`jXV**oAwXARIcO0Hg8x8tjFq&5n z_)3tLFdP|6nF7|F{#1$jvVC9d3>xjJMPzVt#%`oJN#Xw3jK))-W@lFfp#^9&s%eDO zOP2b6HAM`iRHAo5>0d^1|8-wV5yMZtqPf^#_*X&|Mn&#PmORiVTlXLQTGQrQ6(t;V zKI3SFfC12KqDzyXGzF%Rw?UVtZ5FBo7V5+M9+c*E_ zGD!0u>9QH=8S5v9LJ(p4&U$+{au#^ND85o=WUS3;V+W{+{^fW0*tMG@@4jJde~UWRni7bfKHpawde zayFlnnD<;=EwfgRm=(Vn&hT2tpH1S-axlS9+ooFL;pSOwv^OAW13&Reg*;n$ z+o)hqTQ!{C{@3Hc$bL}|ywAr|UZxDdo2eKij|N0@R{btu1}ryC%E3oPmDK?gPNTPn zTqU<>aj1R1#ro#))*i15!#&@dY-~Xp;7f&P4f&rOIrTZW?r-vJW+5KtkRLCKPB6q~ zVNYsnA&-V?L}EevlBg@8ww5aWAc8jY*OE_xl58k#I0@&}zYGTxn*IKy5g`|P#m%8C zhix9A%1X$$!HHhH<#%pJ4;B5HNxXrC4h-~NYfX)THK+Uu;5|a5!9Z`NX4bXqe}`K? 
zYtea`FKf+GB?sFCn(79D3M=B zOLFrT8<8d&wvj(>o+kQbXoAe;-1W}SL|6N*wL*NR(-Exg@+Ry%5*B=|8d}Nr+%7 z@7pyoKE6z(n#bsN7T)2w(P|#A5oiE1$v9!2_`lF|T-ywL+fb5ma z#<+BUm3K$YRUg~ieEpNXuT%2EJ_FNnT~^(dJkKjp0TBHSayy9;wQa)>aPwayVlE4} z$I8$a-Cq<#msI*!xLb6>yzvrOALVxPk9^DZ3m4ittn*xT7a=}K57e3d>w!IkPItCRj9xwa9>+k_)FkW>F@u#T1?WU)W85oo z)Y2{ggCDVzQah<6`fC`<+t(*7^X!T^F1fmI_qgea*fMyO+d}iW+ zh{cy_`%JtR=Ce5PSpCmmFq`xz-)?(j0flP_{8{sY(yLb7WVek!6IWT>mor9D7>*15 z6&Y(p?MOcSfgmj`sU=h0*MXD;Q)Y00L7P{YE#t3&6FEQt+`QU$IX@RD&J`DzjL^EJ zIkC^Ftys-R&c8&x2!RnA@m|&Z|62f?#q?;`+n%juobE_DeMVEwb6J0EjB)JzUjGF6 zbnWaS!bT0#xhwo$v&bVo#q4yeB7Fk3bJI79QI>z{h}&<@4N;qsTf4#)$Jz2|KpFw% zz;l2R{7mTYt3tQKd=4G9r$7UZ*N-0|XvO@7Qg}UsR#%nErEq~uZntlGogbT5Uq0*; zEmAVh0s=KC@^<~EcFSagBrK}$1(G&fb(@K@zRyy>IRati=UOJ8bh(cqgt_67#&fi3 zsy*!3T{B~X!bt1SRyVV^%1@3`=AF7jPQ8waIOQV>vGfn;BDLClMr-NyaobH}A3nvF zfCo-Se>=*%QtHO}0nQ85%D%{Ek_WClFI-x|U;F2Se3rM04rEMxo8B?7Yoj+mr88~+ z0e^0BSK!+5M3IOeXk4p@@9L}v=k2p;PI7^py;u_ zSLxCWwrc;b-!h>u9%yUF9ij1sQLZuQNLifFP_ws2dw!2onoVZpbL+R1WngCmS&%I& zD^gcI`C)oV;X8cU?=u>~)550AhHXa&4ts;RJro_F7eH^wvbu)V>BP8qq2b^*w6JhB z`YVBlG5FsM)!z`i+QO&rz9pl1`EWd3A?HY}iQ=P+i>&Ncu=LhZXYq4Iy;;GlTz%P_ zTQ`T>Ociy&Ad8aNDd6Lx2eIGB$04K^mK-TwJTd6^xUut6q#2Dtv1ybt%2_ESm(tQw zplE`2B@kLAw!#Q2#9>kQ*yui`dn35iZn$nHJ(D5U`>! z$O`mBc>`Mw$NyrX2>*S3HG1n4Drkg8xPR>Z2&uvPthuH0+waO>yyI`H6lSN_acakMbpbl;q4f+6LGeyqF+z(uRzVV>oscfaYiJTWY(((gPwgfH&<6|Qo%}m zNahFpFqPEX@lvDv7Kx0}Ej^n>ToGbY;)ST)rt|Rz+^z^ZH|A&xuaI=~hUnL|x6^ER zGf&am6+Oub^1s`-V$lob4*EZBE?i(*i zh6Wwz+&?{=T)P>qV#B7RMOvM)Pw-o}wB)?WE7r$#@g)4%2!!o>5BNb*j8p_9Roc|u ziM`~0mC_dQ%!c(kfr~X4uDczJ^;B^PV3=}>4}GBosI{0z&96ooo?%@aQATA_h6PL- zdNKM|?Khmu!>K`A08@?U)+xvMQU%K(7&Mht3bY;76R(L4gJT5FA9vJqlkN{S6_(}W ztc&^n`up^x!a}>$=WBS7+Z5`Pg@g0P)XB*k3ne4w(0y&fP=Ra3ovFz#lNJ9j1k@x9g2vgg~gLY%g5bcJZcOY(fhvbC_ z;a_)-zi%G5W=<{sw4k-)Z=%I+`JVDBiO(nl3#;r4PG*U1&2+4pYI&u~7n`-eBJPB| zHr@<_Ph@|O|NeK37&#J|;InR-(!Z9`z;-n5+E`Cc_>yEG~rb4dJBH* zbNBdmB@>Wy(Ej;l*%_rb(UW@TBXv*U+o764${Sbu+clSn5;eJuj3#eYIk+yq&d8*% z);p*NNTtq}MILUqSHYaILsNYki)abPp0YODxY}%;rMrLYliCJ#G>5c%*lgM?hd3E9 z0RbtK(U_zp09~FqY=l0Nn(_C~C?V*QJmQ0K_U5&mbd&?3FO^d}1}qk(PPVug=ABa4 zbyFyVl4}q2MqbmkzUTNnfgLV*Bcy6akpRZ1lD1AqulOA=ebw{NcmB|h3fuYN2SkvI z<(KB#uqy$4|N`w%7@K>pIS|kf54{@>5aZ>a00lMi5sW3lRRyv#~NS00^CH z+v6y05qA;WrA`0R>UD#Ph++*Iz56E_g8(KUJkihq2mi>%m(pmLLYuJau2WIkhlkD* z1`m;?(FBj5HJPd~cmRCM zu{Ms%3;?h&XY>DHrW$a#-7vJ?pM1CQSVQ1cN=(Nt^`F0ocMF!`=xf#tRb?i6;>(D= zdeYsd!z(uT?_>=~(5HpdoLoBr==!Qq*^ZNr=egj3_d~JaCNA3fv4zhqUQyEnJ$STc z&xHT+s~z@ew9*q(hO2i^e3@}nj&%0Bo+XW9U_ZzDv5-+>f zSQG=meL+{QXZ&keVEdPT+`B|As`^dNBA=B?$gCuaxL+ECh;->7W-_(sjY0qL{ry!w z6i6kHRHtIm;Ea0?2g((56vP0k%MLfbL2`2|ama!)o;)$%n@y;?&(>q5sB`_z4U~jB z3-ww`T4#bkW|2@MDJbo9;Dp>ROK?+tw?_-vit|xN)T_k}CO_=%_`?4>bKtn_VY)Ve zA}Fs^KWbF?BGYMu@OGQJEZSFy2*W4u7OQ~HS*@E}StV63@1B+TqKCvXiWxBJ)9=?@ zhUQ(oeEOn0Q$}&0k7lm4JE!zkwaQAP!0g&H5FA}yT;dL5@(5!FKq6iP@yK3ssdM0c z^TTI`uiZ$2zNQ1v-qr|7TFQM=*0R%o9`370UN4>Z177iTVEIGY`vMA;P*CK|KWPG^ zfzi)2E*3I6qZRY#e=3vRk4&l}=~tyftZBswfx}q&)7-B8gH(-o&M#{$amuZl{+QBU z08qq4hOWc(@Dg0fOULwsllAhQTZ>O-qdXE~c+h*gK!-(l*#5X#Sp9tyv4h6jY!z`teAn4twL6a zhWpp&VgM*l^lPjL-TIc;M*{E(6CkI-6xIk8Ibg%dmBU!eR`yx&Dl;zt1t=DTBx)tQ zU6BtoR*TQZ7KN2~uU$IYLZyB!28{|^%UazD>9I5sQ*=arUkMeDWQ+*7-as5>F03pg z8xa?&u(efqz87_;x=ku`Bvby^{hYY(=+|)+JQ&RDm0V;r%iTh zb*)J8&!WD@kPHuQSt57yRP5+*1zae1#>7emGrA&xL{u?P!0;Wol2=2-mdgi^lf`^* zhH*P*cPOEgMU6~OTAon;Vmx|wbEs9b5#OLbA|*+`wM`iJs#>l7CLEYXwABI#I`|tF zBYc1s{vCSo{d>b0Vyoa&g2H2XTz@L1r@R^P{mM`@;Kkg@qBrb*;-$)qt?OU2R88xq zMQ*1g^ruo&HO#uJ_I-@LW3{n!sw}{s3UWE*=i5YXdHOgyVkzz3Prs;R-#I-zWmJ6R z_kDoXcT1$19Eczo{6&oJjIn=(4J_)F7ucPa)n9z)^oFVo#Q8HwQUEV7OmQPN(wggT 
z-s~zq{h+nkHfC2RJ2PvE-cFZrzY#0TWnao_@N3UemU6tCpJ4{=DV9nx;zu>uP!!(Au|i;<{g(MKsaX+N`D^09(NeH%XUH@-W&7P8Ct!pYKZFJIDa2$=f9upt}a4>&8< zme{Fw{HH(i$}8ZHW|emQhqp-H(x%X>=|#jvU9m~4uur;p)#pNnNBTHo)dWTt^I_Lr zk8ZObk16nv0>W%1>?nBgRR1{B3SJpE23ci^I$>k%_w4O;)nV!*{cEPwkdWZ5=*~b| zZT67_nYZ7SHF~3XI7#v3Xr4I}589Gy>Q^Cu*#Lek$YbAyvxs?|i`!WM1H(B)zV-$1x{K^R`(#g})_Q_+j(4B&!#T zVZL;8KxtlL=8w7BI(bTZ7>$MPtmr2H`b z436f6{As57np<0BmIc%EN#C8P>R-T?6rv3SLk);NvtP%YvY>0WPcF1m+CF z=?5wj0FQl|nK)8UZi4Yu9$|ttp5P(PNdMpb3u{CA)sOZ0I#+LFNW$kG_=9Zf6Kt4Q z+@d$qTJ3%qbErmw;R0bh8U>ZAPk>r3+!qTs$@35XD|#y3QeOQ|8aFB3aizS}c4tZ; zd+qE9{k+;xcM#2-!cT`)p8S$qTQ@oQ5v{IFN}7!9f9`^e3Ly{b`AgaR_hyXPr>DNt z?dC6rE8l#(oY2mVWBVG$;TB-7s;OxxXr3nMkJ;L#=MVM*%?DTksKXj%4dK7X5Eo2 zOebJ5ZFu_wm%GeJSc8hmJZa_^5mMwKotynp$xkI!7}A+h)t(UIMu&&oMU)q<*VW$6 ztB;5|)z$7|#Oe5`o2%+BC~sH4BrHwQ`gd)n4>+}x8LAfaSy9C#?lFX&A_idZ^afI* z%#1RlSMdeE9Biub_@qaX9}@_8%jCr^O|H(4hq$@Ja~g^dR-an?MfUU2dow zVv^->Pdm%1eC89?*wY$x(1fXz1uP2`+^ZRBUqq{mjiAmNTkdn9sJ_T4^w)u*_W-Ei zlrRlGElN(*PJ%ih7cC%4Ts`=${5s}~Yd^y-cHm!t+iO;nfIPx6&sREaN^umF@BI|} z;Q4v<@^JlW)r9+KBu#bbA5lT2lLj44`_ux&X6u-En*{swHSGK%L>!oOo(iB6igHf0 zIWob|2?Xv<7D`w+Azd%@no8-D5dC0HKzxSNZ5A8TVsi-<6;`=b(ftY~*U?NCJTh*; zDfhE^pNQlqCIa*_-;(T>Qh$g3(D#vHCZ^huc2C z%xt_V*k#YF2(Kl(iQYl#>#KPNa5q{jO;-0yuWvfZf_@LZ2dmWS3c}zu5$EMbsIaiW zcP?i0LbeJf+aDBGVD!&Y;V238P-j?wWhb9eS@-K!kp3SYC6T>AaHf7@7&uu^9S1E$ zL=i>5?r<;T+wA<<$YsHIS+^cqHCy2o-^Q|AbGw!(1%euTz=7MjVsU_VqbE5&$QMI~ zfIQw79hk~tU@^t4Y*Y$=JBrYjR-llzRb!!>pV>m8KLemXb9C9okaKZgFCTz*62c^B zjGtAV&{i6Fgc3GFoe>E}3fSp+-*+#M6MwZ0);;f$?fBucJb6n@Yw21 z@WP%AA4GyrYsqvup0e*`dA0CFqjfF`?Zk6~xiA(&*n_BbB0dGs2Avv#ZXE_6M~GLJ z1V+jLHrzT(@0`E2K_v>w2fcfoqAk#VG1(O$NO7fkUj_k8PK`Sb;6yjjctij7OB4m} z8!v`J@jEC+Va?Zbx)O~C*<%45&kC1#CVWT#Ws5h)wAd5k{-i<^sxbOCw2e?@WPFr| z$qhd?p}w4HhFDUy(L7%N9ur!#IOIUlyGlSy5bmo%)=^Q%X=@Z}DgbL#a4zrkWFip! zF86JxkR4C16BF5ZCd+EO^=$P3fco1V*F2lK&~o7eXESK1T;8du>oVzD?UX$gKf1P5EGY%#$a*b%8}Cpm^^yu!5lC6revEU`KZ!QsJ;k zkYgzo?T%GQ?+rCL?)-VbT(E*j2*RpE=Q?W0sON0;!FTIKn4`Vvb+EXmHKlEPEBl2h z`hoF^pm!IG&5qP%VT-g`)ifB0q6+hWNpAa$92Zc=mV98ek)Jzyz0`g$>~nh9I{!V? zo!^ZpgT~@P9%*JAVo8D*{Obw0bp@8A?~^6Hdth)Hsr_!THU?Ldw~mvQGf(gPJ|0)% zlQ|oh9JZPh=^rLowCb~`K&=jYQ!{yy9IydE>ySkbkcfTIhRtPyOJ!wvyDQw`ls#{) z7>AB!jrV!wdcB=HD)yc!bpFk;h`?k1%yKVb$>ILV>LTFz4np?=9B|TM$K{i!v#Tr{ z#N~D%iaqX4c&utAZYQB#!t)XfFTRQ;t|T*gZ-z#b6i+oAPlZh~K@N$f!X8Ozum#MX zqPxok5mGnsnnfUEF(ymOzhQEFOw-W%iCzCgn%Rh;J8j-`|C29_!KnsI68ADqa`&*&&ChGH3g zX*CnSzbGsWSDaLPV!;H&PO=t#lRB1w_F@569}ZT$Mq`TV`H`|m(k+aLj&`7_1-7X9 z;hm7KfhOb*9d#A;;`_-H2-o$Yf1cGs$24W|&L=`1wGW1aavuwv%(VyB_(X-m9Ao8& zBq&ncknggmCYfkH*iy^GkMyMIdZtaCCYRXXfb3H!|Okq$*InGQl1ZzO_f>z&2qh?Ou1V4xy zs=$5S(@s^lPO)${%=SSV)GOZhY*N$Sm>1L-i!1^xH|88PtBL6Bx>7Z+^3@i8WSOhR z$Ul1~PpxVGgf_OdpiaR~V>IA{V?rkA z?g^0Pg?th-kX*g){nI29#{mO+VN8U)X>$#602DZfFLc?PPZQsdxIf443jojnp5Q^f z@7g#@Gw<$k*VCHECHAj(xv$p0w=Jc)5v4U5TGQA(z*a|oK&VXrB^?gq_A;_ zHEY9`6qC))n+GY6lXr?%4b~FBEtPzg-Xd81R1}~%^09T4QYXO!R59LPA*9>CqCo7< z0whlw$(QJ+@dX;5r{3DUV_*m8Q-Il+~*6MW@VvYl$moT;@f?0vsGX zuS?f1ziUtg8}T}Bz}FF1;|Fx5igIyQhV_*CP#>Zo9UA+Rbv&S``@D<;H2QLZz<027T2PMVW=s2lDDinhCZEe~eSNUpQ)l&#sF7-;<89sVBHu?c}3 z8i0U6hlVn@!s^HT5n;k6Y+~ZLNmfsizqQ{w4EsAZ*zXbw(Fth(Lamj{y8aT67U%10 zfd(-aEgMYy(!mV|a=sz~c%uWczDTd*%OOQf;6TRj^c2(??kNf&rFYCz3CefRX+>21 zka(}HnwZEDnUdNa)v;K4cOh}-(6+#c*k2V`EuqJbt61>)_#9eF}l z0fVO1^DYp1{s|R3ZkeGIu7|cT$mT}qA~&G+#AXIP+D(sD?)FQ3v7Cyg-kH3ae`&^? 
zPCRnxj|1G9R)7~H^}z{0B7cw@2IBOL<(gw!pen%faq$Udnd43sb?HI7W<8DnoK3J; z&R1m(Zoc9Y{E`CZ=)^2nc{i9!>8mdAgat<&dO`L&FzoY-F>KM7*V!}IiXcF(KVMRd zRjhflGV=rt9Jp96(#JJx z$8jL^hh~how%s$Z1-)<016ZZ>N2cLBPZ}!tBr_|rwYGU2VC_3EJL|XeFFAhIA4WQzC8+;Qszp zO?Ldrn~&>eFoh`!T&`8rp}s+lv5ZNran|G&rVbaD>gdque$WxDD=-v|1|*lUq7uR! zaq^OLNKlDfWSZ!100m_j<;2?4M+V$t^V4Ep_=CR|*e|qfpy-LuRBn6kW=Kgr8=LZq zgE_AFg)r!9H4pL_-0{-`0J^vyHol>naNUS16qGYi0xF+UpX+>r0}bcQ(?lLU^pQId zc5b#dUy^Plq+%D8D2=L}qw<@zsMUL0G92aY=*&&t1gO2AUXxBQQYF*WQEia_1VbOj zm9s{Znc+moaAk@cRE8|XDrt07KnI4iuKEg7H53}O2qn2rm;7y~io?WSB(99CzO;~i@OebJ)DE0#tbdU$G285ux%Fbea@m*K58Az=bZ1`<9rB>x;r|Qpn6FzoBdm!C zJZQ!~=p(0(-{ZhrlOE-*ShocNAc5lIQ5&)hDgJp901tauM6vc&#UiYBOqOz&oq-bz zFZfI|1gx-P*tqgu?k5OuM6Vm#F9QnaZsONrdDy%eenQAYP9AmBNXmLF{**oEEpD}F zJ-6-2pnU)ENML0dw4;XG@y4oi_$S%&3e$$3H~EoSZ9k@8ndP&83nL($s3~_`Vg4?| z?fl?H_-}^D!WMuatgfkQU{(n)94S{~jxMy2(a8(#m)1a#yzG6i85I=+{>erIB)}@m z%&YhcsrBvr_WAX%CqsZ*!T)~?5Q%lLM_skxmh0HDxKM@X@3Khe^|1lh%m8pAI#FX4 zSntHKL_LKKK<{RHow9vBpatu=q@-ng=pcmVZVzoY#E#zp1&D&2_mwD=SX?;USY|Di znKQc8ddtaFlvEcra2vb9$9CIJ15GVA=m9R zh)D$h{6=m3qi4ryaK_8GB-=QpiKt-f>x3pAETbU0D5Qyp2WQ3w2C(V%YX15_3c4Ik z@>sE{)*@59@j)Ldw8ADelohyLAMVOt2VI>w*dAF5>HwhbVyV>@BehpgGd_$EWRMMD zEZZM55>7rrl5KX)54ickzF=d;puabvCV}6|5uE?ko0vb!dgwOuBNT+U?g|rEzq>)*Y8!Yf_EMxxjW7R80qZ3!f<-Ve3%tKau6S*eUe%0ZJCc zA@Q;dW4}Jl3Qd3e%&+t5Z0_4{9%>Q*m%-1qh3c{gN@N0xLEt{&k2a5UATuR+FsPGS z>ELKJSxHfe>Qelk%!ocSwNE5)BL;cpwrRPQt!l{FmdyhF{Nrp$xRc~FI_gGNmfQ70 zR}%T&MpcMupohjb@c%CG|GF#MA)x?T+%U{$2j1OCWWG^2mTNIby9JPcPnuATP-r!H zSM@?CK%Do393a9ev- zfQfcuRV;?z?2Ri?VQJVX1HNlLzVf%Y3)|ycw1Qd9;94Y%4n_W0kyKge3B zqJJ7BSXbh6DMhO)#xvD=QWxLdJlW(7W^~19;=XU?rw>WOCw=ZTcAc-F$JM}gVo}JL zIhhTU0t5*${q8}pc+gDz8SeWJ%b%TdyXzAm3L239WvchX#*#i&3q8Z;4JM8S#h(>v zK;zq5a-iQ8I64UN@3Nq5NPB={ZegtYWKlSWpjuI(_cL2+bo9x#f931R1WV)7ggBE{ zahDQTBYeQ2mxjf3rg#5y`3~{1iue*YH&M=B{1#8*^8z(0?vi_WTou#M z^8(MWPC9Sy-}!YVKDQSPzF?OJ)gdcMk`P*7!4(zAV?{jO%32gN;yGu}^tqIJZ|=_e4nZmKxgmnXna|5?YBoG4}PuJF^NbwHSTSn46-p&0cHFpU; zi}U~ui$(<=_%L)-WIbyuDtVo?;|tcx9Rd_IB>(R;N%h%o*1u-YMm}%gn!raHo!Ktg zSA1T8Vs#j5)W@`qXQ_aUbSntF=0G^M+3G}@!0Kz5Wr*Jbf|aoyU?XIYF`F`#(PS2N z>R$TxSKkoR+IYKVCyThO#i}g5H)Lqz&22lAOvw9|3a*Fa!ep;y89*=vfr-=LY?cBj zvqT0560pxFv^h+UxW`*A(R<|KNQHn@4#mbxOz|jQfWzzhYL$iznLo+YgqMUka^fXh z6-HzFEBo&7%l_aoiBGT|EMsf#6L~bW;I74NIE1^ULZx}tSxhQJNg2=2>nZSR>b)Qi zFxbfG^r#zEs`YwI6ql|>u&DQzfq~~=`qZ^O$IlIVop6b4;jg|!w+W(Lq4zt<4%J9dc zy4y2Dy-|v11|D`wc1IcQhGK>HhsyiHZ4Gu)z?-r^4m54lI3~7q}6zr z`P{+jv)D>GA63|hu;NFG-?g!Gg+sBeEHn1x-7q>NCL45_*r!cOe}GFA>T>Rt9A5aj z#}rhBaPU#J7#i#^Ow(<7^va-GK``Y1msZ1o*G7E6N0sDBRt%OY_t90TH7w#)@# z0KjyhIIn+x^QGc|&%^TtkBWW55|OO2Sb*aYsA~8-|Fk@nAJL;$&1!}FkA5qsrZ5$! 
z{j5r3TNW3xl1TwYgd$+(f1me#&Ui&R;UtmAvY=9HL&78I$po`35k`nGy$~fQMbqu7(%Tk#+VF+DB)h(- zJOxnE&!X_G|NcsZ8-qZ_zu^IyUPnx(#Izj~7B;9I4;$3K5k|+HT#Ny>@sM=;NUh7V zuqBSig$(P*!J;yNA6d8%0t`@aIQMGjL^t5|<(`9MG=N#YDHMU*0+H9hHbQ7&AtiSD z`0w52n}{v$+-K%$m`)qE+)Q=dmPvp7>K{((Bv&3KAyI$d@kx)}L7JIMEU0`~)c4PU z$Y67%&W6cTg{$bL8E&25wm>PD#fa8#=ZA};;tUx3l5Ua_-=?pz)bH~K+(Px)V%l%U z`K2n-kE06@x=iwk?oeDoaAQK^ndXNS+1lV7qUgNd1f~fPTkD=MF8AH_V+|pPM5C}( z*?2S&A(5n@z_<6heG@p1wOMzYZn}0?}|Von=Oi6GsEb2S{#if0Dn6@0HkWcVuh5Ey?*M)m}Mww5wA#T07UG zAk=oOW;v}H-RX}@075ec9W-G5E08a9{f;C_${)xUw_i}*5YlFkAh;F^eqf38&zNlc zE&lM7Q~XZXb)5Isy?=$^hb70egQ2UFWQ;=JnvzNh*RV2(_b&0wG-_f#PU$}NepQhh zpnCxft}p&1Hf}}nV%+f{*%P{6G0(Kk+I7kEM~?!lv-e`zat_C0kcMmc0F?Aephdyy zPjW2f&+NI-=CVlQS##802!oW>G^ZFX+uV~(GF#dz<zqv=;O4a|C8`17# zt>%b9R);Ejmg-}2L#`3%a*=yU@LG2{wCZ_*fIOe}J1rQ>9ZN~Bt}2&D9J8Czl!tV+hWt6B0j(%dMJzwPh=Am<2NK#gNQA zIlA=`D;Zsfi2pY5BWR^s~#yYXt?+(oU9#WH0 z=vRoblQpHV8W#F7>lCRGZD}T~cpI&{6($@29o3fC(-us$n}Lpw@ir8(j77(_`0_}W z+6v~34K)}<5>CkFIIWBEw%pNL#kYjOmWVMY5>ABSzd%a6t6lx>mAQk%{BEsB8PZ{4 zGkiX07X#EvqGxnfQ9i~CAGisZmENN9=hDvt!2u2V195D1aE=$&><1bXc4deEJd%Aj z;V$yO1gkyU!`C zz7ju%4AruWn7)xx85;y|^Dl;4M;2$3{KS3+C*@ai9Y9zji%GJcuFr`~9e~>}Un1^@qiq>84G7142Nh_?*VH zGDI)sS=8VhqI3`}jdrO#n?<7kj*xYbv}=g8%%8>J;RSh9`vep%*ttEKT4}<&s z1DA!?(>UeUW-4rhEV_`;f4jO(AVc9io45O*>3;dWQi?Fg-DF(w9Q!kLcji2e~zt=H^<( zdQF5MU!KJ$L4dPXBjt7)adGmyPuWX(TlD1Njej*R(LDEAcHj>ka!qaeD9OC=my7(c9&DMLbw5h zhi6Q8mw~lk(smox_FFLehe0$~G~HuR4Hy@!>-3*%gpxuj2|vlfS?1(^2ykWy5d6smq2@D^|#3`=rEP4y#K>|D}=P^swittG<{8moIh%Cf@7@)58I7bKi z29c;?h(=8wAn7JkA;^rb4ZxscJq?fg#}X`xOBB$z-h|-kRH^LA2;<8|!4~lnEf%?% zVdFVSiITy6i$wUm55tHxN91!b4z`X;KP?lU^zm0A5*^(d76pKU1cD$yg=_H(zi`<3 z9|v=9N01`x+vE8NPw@yR)qRO|mM)|Y<@pG_?Xr^%_$5HGGPdR=9;9(#r3DU8{GIk& z4rT0hAu;JhEf2?N&?944s-h%v-}p9hC;XwveOk)?cC|pTqo{8WcbFDqJRBIDtE!n+ zvUGBj7}zR&k?Gw2$oziE3PC3}@Z?yn>-bKgCSv1;0cCtZ=V=OGaFU;{!wL~K=@W?w z^b}p;fdjB#Jo)Ua5zp|2u`LqU#*|`TF=<51gQ%F-Y~;5iwZZN7Rc)?n;TeMuv?V1q zK$wr-3r1Hl&&mt{HU}$4n=CcsUpc;(uPJ74_l@5v2xojxp7b4@hs`5Kq|-2?%K{Kt z5q%Z7w8Ap8J1x5lbgVrMwE2g}7Fqna*a9{}gbO&5`16C{dz0Zl(O!%}xXAUVm(B-W zD|0qkccZcm3H4v4QQ8=I9}#lHKYKoXRPxJ2cCL}Jdi4^HLt&t_bc}`~ArNdxCW;mK zB9{w%;(57G;TI@#$y@o~bivSi{~@QoFF${GZfrXjp69tq-iLTk2B~`I|L)^zv}5Ru z7t4DI9jcgH*4JG?(H7b!cYi*1XV`PZAI!Kp{H{R2x`Rf`Grw)s4WcsQ;vnv?W}{n$ zy=l*{&tRPK?4Ai=l&$4>KgYT}pf_v$x_3ZMh8gfCjQScV2W`=;$1xH=*K{08mpNII zz(B;BVrc2g$WouaIG%+$)DZMg)#|MCn;Z36^BBxg~vj@F@YPj7xg! z3YCsKWzDGZnbDk&1i`obV&I96PC)aF^vvfYdgWWXp2F=zLwwN4vO)O4907&21rv{G z>RLCO9^!(%(pQ4>`)rb>c1RRUCN?v`0FuGtJ0W$-3@{P-8bcl*P%>A*3hdSN0~kA! 
z{d|XO;X;iI0L)3+{-gEVrXDGOw_Ryehy0LU0!`ye^7UZ#t@2=a@&{+NZFg#>ev&p7)7kNjrrNhAhC=Zd z8&~>d`9k(gR8DfRb0MdtAL4I(1-i~J9=J>wE~I)^YFjRlRONlgi-Z_2UDL;O@x%ZO z_jdJ)L2|-^kfmStb|#l;K0(nXB}{*-34YFV4pw$9o5rFAn)8|A*$I}^*kgptuDHeZ z|Jr6mFPLVV9&MkI8~cmdi-y84)@quysVl<0I#W!||NRX)x;c<(u%~mh(w*V>@)G;n zD`1s0`R4FT*zt6Xk+QI+&=lYJwFO7-x5*hu zJ89E+lEzmQ)#0f4gN<=zZ=kjXlJM*zs!5)O1YIUt~o&5D+XxCq4cF^E>iVVygNe z;H8G_Z#QNX1F(mf%JNzc>B2SrjpHYB0{nZQFJmX|v;Yduc>Fk6IbC{p-)7ST4CO5o z`A({0S*!@+TI|nmxII&N6$^Mc_^|P`xq?~7$QcKdh;m*{Ik&GL^+_^e(>#*a{$*%+ z!5O|k?_VzLoW(6V*i;_P+;c732g|3vhCxg6pmP!cgjb*$vi_*AgC|JOTmbiMfqbu~?=R8MLDD|_dn99MbgCEj zXu2pQCuhdtj1N|JCG%s3oubAf7QOOW^JDWbizmK99op^{oc%~4BlATM;;D4E^8;Mq zn#<#{aOKCqmo$*vnv7TcU(QQ(%?}m=r~V?^-hN52_*!wtRCumDfJUsg`yfAsO&$o5 zeU3Q8mYRD%8yi4CipMrQ9xn%AX{NORT&X=9e&hNS>C@a;iw@WL;{Ucxj-{i*)~15Y z*dEkcV`a&0oBUb*1r$(q-AN_DWJfyMrG>x;UgIr&`1RupAy2_#T(y)&c7nVRB1`8Y_4rc#?APlko5a&ajgIQovdL0}lmK@~h)Xj8f79{gbty%^ufN?+ zqj&(ylJMx5XKYgKXJKK_)c@3I)?rQk@BiP%h|x|u2TY^~NQaD+kd}!cj8syTQfiD6 zq#FfeNJtIFfYB0)v~;Ju0i~s+;mhZ`e!p{_^XGZp=iKqSAJ5ly%O@WO;x6u5-!g^* zXFqRLk(;T^BqgiLFjZSoO?ztn_g@U@K7V$$hKy>$j{~O9-H=E}ZW1fF74XT~LkO`p zy3ra3W}0e%1M0#I>qAynQFSKsvW^fsOcdr2B(FfChW!y?KlOy55w?g(P z=OembHj&|mzqAo)W!|Wtg*aW@6$rOwc%&w=I+0GsWVaP3!vO;V-*CVZD8ipagr?^u z;O3T$Z^+%yx-k=#^>^>5H1-KDU6rGsX*Ffqv7wgxcX$R-GZg?hnhE;|hm~(Tmj;0s z`GI*6#0mj^AQfc_Fkkic;=h>`%+=Yqo5(IXt~bUXoxG@77(7PqfkA%|b`1(?d;qXaBG=W0)thWCcoQ)-MN#;P7Xk_@!$0Z zPwf$Zu~^bU2{IyiVq!!JLd>uaHtGVf#adOy3c-eGG>?jAG5OS&6{5ZSQD+6&#WNxk zK+c3I=b8n#6y(&n*}Jn;q-P7G_wVrcmqJ|>QnFsj^l86NuVd_!x_O;|2EsY zKzzuSq|0C5I_}J;z7tBx$y>72M+DQ@vpI_f_9?+lWVvyx#c^e&wzzEc=&!6nH8W`P z&qh5!Hr~V|WG)3egy`lUAl=H@ZlQp2J_R+&%^r}G$oa zJub*X8lvGMhvgD22kl2GeqqkKP=X;PwZ#)_(Ou)VufaDX3v`~44iB|JAMJMcXejJ@ zpS->Jd)O=E$&(1SRDD$r?5Z@8JoZJ(rhZNodl~Zt0AZZ>VF*%idXPJW^j>EguByPw zaWJ=WAsX?4(PsbRZn?z+ya#`Oe7HPBquFCF9m(Ou4L3Q?fWxYM zsQhP9O+OSD%FA zBD#>2{}`?0CE|0ler4DnI_Np(TvNDV0AfnTn|NACINlD+7670h74az2eEWs-onM9t zAxFSYGbr%OdBp~05PDl=*B17=W+&U0qJ)%G&+u6aHsEpB zGYZHbhv6jGo^g;8_mQTkpJrazW)Jhhry%+hfzl_?K~bZ-2LfSc4GJXyIT2mt4Q?#B z1axEPTSWTBBo}nD&%$@?+DHW0MQ?mNOUhZ(p7jEbO4cH0Q17?|#vw*>>L*-h!;e)=Liv0bwKP@G-ukq8xb)!MJu`5M-~VLR#t zaownhD1eM(g>VFN$mtev-zJGzHiK{@W@LL7m|W5sf@JRj0`Pj|=JE(N5?!1~n^`vJ ziUt!v{kZko<;5(2gf!UrF(=OO&ZICcI(1M%>>ePq8?k0b;Hg5UU*~RE$JLE++?>v? 
zPLVJp2o5=Yh<5&TwX_dFLy;1yFbLDvldgtV4XRU`IJcQ{`_jrj)eC50-8od}kD)6Y zv`7(@Z1q-}X*Zq*r%yrcRioJwYtCVj4Kg?1EN>NJ1p}8Wh(8d`k;mU5$-!{kRC#(_ zX1Soe3o*3`0r|VHfqLe2sVaC2+$HU6{Gnw2%{#`~S9=?E0e>EmqJq>xqrDZgq@h%7 zfXt6rEyFaO$n7W^TE;E8WmVer(sfeLSE0^o5<2SC3J!B5goBq!w@+jjIk3x4bHb7T zW(aWHlRRW9Y_f{gsWWlNn4Ah24%pp~zQLPTNS3{oK?;2ExVu6Fr6}~%TpUm879pfN z>oM7Hov;#K9G|RjwH<@jreA(+J2gO5DpN`WRL+OFWX`ms&|mF}MIvxhT08BxzW4UJ zY&RL+e|q1!T+4aJzz?7|{taR8&wX7}7BeggH*%aen&>R~!%9>VEzkP(Kz>*YE=DjV zFL-)u2tnN=5SE1&S?#LPMH;lo!A*>o)lww2q}7rRf773NQD{&xYFmc3IQ^6+Q zr-hX{X~FGIo9n8139+B5z}FQ0t1Y4+oJ(&Jp3J2fdtCn;Ku8JeN15lP_GdbeEkalx z<`Gk|jxmN*a*15zOPJ<|EN)5zsjL$Rwl z^S8nKIgHiT9lv|s)3CSgGpv`2Ccq`SxcBmZDT(XxDd6?gaR0KeS`R*fi^NzuVU7L* zlZ{!P7P9L|s==7#_eyUr?-*Bkb!w*!p{(<~p|2_EZ(GWo^EWpBv;=mv9A#-T zeGc7)5)bKi6JOJ3wyTF+Xrx(aBv|;?>0u-Jh_yX9Nnc^Z|lyzwYjgScRdR~z+OQEEBrpuODA>*#3ZcCEYa zye&Bq&jZy+ZrZ*0S#^gR3~aA$B4^{}z4#EOBE!*@nK>A=E}WlLftAVjU~ZO!m>8le z6!uk&h8{;><4YEY%`ETsf1)QpzXnIopTg|`as3E=i$nSr-MlalN3#9U^DnRjLDJWq z=3#kYU+eT^D9-;;8aj3++jEb}sJ7+-O0JvNhPtfL`OO`U z-Rdx2ah-u!%X>Lo(u9VWO?>OSr`D%++=R}|7*o<~9YH8V{*Z=0_r((f zLxhM+!Nno19-^=vss|9Ke#@J}qk=%P#g&gmso?yc=PAhd*uFm4b(ZN*u(>YQ_Aara zkymS9)Pqz9bsWFL7c*rGJmGdAgUkY|&fCaod|QujFQe~|>w8MH{ltQjKTL_}Z%2-2 zIo+pudzFWP+1PY!I(|Y9KCAi=9Y-73n3itZi?8OuoTMQ!s&Qpu;QOu!>*8S6R+_GQ z^#^QnU9)IX@92g*e36FbHsruDd9LvWmjdkWqZ;QLP?bAi_7_lbw z3?NueQUKykD>@c0L=BDhcffMb8a)ZZ=GmX?-C2Whg`OSZLjktaN7V2&Bmt{3uExVHS6pAP8TW(MvqrHLLWwGWA#D7R=Jsb}&ww(*TISjd-%v@c$XwrUs>(?2(dhF++$ z5NY%0eoTgu;ojk2;`a`pAY`c}d=M8S?6CVQZMuIy1?6@iUKC*-v>%(M z@Y~!YF{Wxw8c6-h6bBUOO4uG;8QEpppWEzcY-~49{V2&)D*7)gIm75d7e&7y@zC$_ z+4=&pf-{Pbba5O#SbufbshKBBw3c=3ejL7>!S4?Vm0tf#lUrk2lSG_*cuXD+{~p2n zs)=K8FY4A`dTmf_1}0!#xkxtsJs3cAr2liIo|P$gDPVBI5V#I&XD?)181R@VxkKQvi?GyRF~UQ%U)E=+H5wvjR|N#*l)iAOP6?z2b? zIbZmbn{y9s+xF0*kc+CPVJI4FwF99ayb7&fZxq!i*RZMRA(ha2AbqV_%$gr=>WL?B z34aaO97&Lm9<%u3PO`o3&7iWbQ*3B!)A=hHp(~4Q@(lE%=>!g7$^$l3N2Z64i_I#D z&p*!hg~O56)y<~w)U;7u-p~H03ZHMF;QKV2acXkVF6l_?!olgr+Tbq*as*W;<`h8w z7kkF+t z-jG?|`1)09v*+utRFkpE{b-KQy}bZPHHi1cVjd6{*lP7`w;cES?=>Pc>L>c9)C{4au742^dhxS^c-dihJq&*{5U3B z&Y2I4K0JBKN#}8~NGW;Q3Uo3X@B;O=jsVx1)F7|Ao^e3Z&-K4ggsH>6-t7^o;)>`t zadG8$nB2?uRsWM*5l5|D*Z)8eh%qYaMYN6ETYuzl#x@kjk{%aWIG@1? 
z_}8*if$`L-(sc%c&CPMHCE-ys)E#mdX5*Gfu}LFs`^e*Ui!#hn(4UWYZ6?*N4DYL* zs{3x&*S_8V?TIQ%>&+;EFS-~ATE|sLSbJE|jA^1y!x0Ckz^FYxhbh#WS%J5h6JI$) ztS<0jVwe7+*^6AR1KR-4iSX=+==eHKIViT&wEOyh^4Y`5D3RHQ`JIWCJ~{OYZz<<< zp6u`c5f_8K|Dfr+qe}I1o?_Ys3on*dWqfj$f)yfzUBT=3-gv(Oo5+aN0HG$Zg6mMd z$b#_UF76I<$}<`nSG8$+64Eb*ArX-`f^KX~OOM;Wy206LOsEg30)CeYe*^Kj7);6y z`OV!GXp^}>CidV{_!=BgLvMF17OV=zf<2TWOf_qIAg=k(w)$JPL}nyh^m(UQShp%> zrKs&Q4dZgk5tCn6jJy&jB<7n4bkQ^wyEzk0nA163irbtE>$hd?A`4v;w0V24K{Cm1 zMouf}A8fXnrj2dkPxYja7lFt88CDQ#!pVI56{D>-d;9K-WSt1FOJP|_MJM)D_lcpV zjq;`2sPEFQn&dPl)qKBLsja7C#eX$Q0=t)><4If{Kf2R!8ym>Hw$_Cmf$W@bHGTwRpA%)aDa?KSUf5Xu;_sugPRDEuP{)iR7(A8Pjie-7ZaCmPu&0jhFS%V zG`uf*9pJQ6=|XRYTAuT8b{gScZgaWR_O9PTBe#x*rfXS8c0$#{cj;YPqPY}wTJ!~g zCCB;qA^N28WM;K%3;8AatNi8Kpul(59trxbq;SS3e;H&z$$=$M;rNeWra2W)Zk7(I zE|Mn|Z-D$Tt1bA$r>8rNlQZXkH0S?LOx)mAjXfR}j=XaxX1k8;S0Xo+pr9q0kvU_w z#i^_e#0lUkcjMVREfUe;(>2ZvyHBKaNaAdL|qeXAw4w^O@H3C5fw_(A@_D0Q&iK&V19GgH=N< z>)P0Rjlb&jeTN=QvEBl^8tmpU|_Swd23Niq^$TiIAM9FbuIg@U3|6x2;*$A*%k_(QN zA%{?fM^J{XxjNlT1e^@+qbN0EH-<61-6#9HKR;2jb2@I2`zT+3G-2jVzo7f5jN{iQ ziz~`-h)L}8CnRq^#tXK!Vd&?rnJKEM-`5>SD*{V)SDREBwtVV$fDU2aT@lsI%$#K8 zW&9SbXhQv6ilcTRQG6!|!J+(QiXYa8X}c+gY2|q`nmqAM5)QCSmIY4gA>#qx-!_3f zG(4U|yY}0SKoRkRdnlw~oZ_J_0ES~H&xM4~H6jZOlbeIVzL)FxdJ7jC4)**k3BKl6 zDXPYjcN6V4P?0mW0+S8QZOh)Wo1oy+>+?wnIPUk0Ti9l`Bs+Q>&p|UEXaeQ z3@-vs_wVeUNsx*$mH7a<#cip5dJmjzm=e78v51GZ{nsLa9CTC}17xxCs_XoxG2_Y7 z_vC-^ACzR-pYjY$?pmj$mTT&Jn?(0FczmBv8uP;!EoLvoOABG1Kz$nwJF`ik=pwQM5v(lMU*BClMoVi{o%|1b4(Tp3n&= z^KoXQ&dUaa+T||mzJ_mJQ#!|A*VYP)u5MVUOx6)0f66HoFBk4llRxHQ9<4)`f1@&J z3otVp!Dw=+0)l`Jj6P~9sqMUslhGSZVTV|<~0EIDOXcQ|Ol zoBeR0_O7@fPh}V%49vsg z$qy+in&E;9jn0}uCt=6Sh++W%0_jeyP=urdTZ#pM?JIc{gs+L42f|Lj2da2>4rRn& z3A~O6>BZaiviQ!QvRs!U15_KQ7q8wbOD+XX(MLB47ECEu zq+E!!^A0~e?U4C=+?iUN4W{_;o6|GICb{+6B|I;a0O<-@#d_2_Hq3CzYfL(qGlK%K zLSP`0l_mM(UAl(4*1)Pzwi|Vt^ymJZ_>dqe|G(;5cV;FVB4lQdC{Z?qQSi8lJE#HA zGG%=a#0lmN4ffFi=+9>_Rxtlotz8FHliSh_MS2k_3ep7;q)V?CY0{e@O)x-!&ioLb6mVLfe+W7Bm~Lnh_u_q5VgIpSM8K5Jm>1%WAj@0gm@JP5qsx6aj0`hP<0^v(Sfao z%Qp`@T|Jn2T|~aEe751O&t-9Y3%pqr-r!fBEB$LU@6D+Ku|IatViS)@6(xO(#&Dd_ z8@uH?tr;q$fWee?pwV1YEbU)QV9i89F(M^Rnpto6@RIc|ItR_wV3w)`TH_;?LmymE z@*^=ZJ_C7{&+-lZ0@C_I3PpyQJOFxzc??5+-&|*2m!(zmqW01{?0rv-kKr|Z;i~fq zFJ)6hFAH)<#fLkr_uF>$tHegM6WTueAW#PEh>45|0hVOF`!KEXsN2(G#D3ji;s`Z@ z^1?O$+YV*!*!;sNxR!Z~)SN|f2YaK}zBXg^6}gx~-w!mAhyt}psPHyF)&eaQ#?wQ2 zvZNLQG>cMl`L^?@-{TO;FG%z5pww3H4zHx|<)?Jos$INi>gZ@MLvwXqiKlv=^M0K@ zw?!1yD8yEfZFBlf5PoP)g);`19CkpbDGFaOjIZB>&%EDlL~GI0j;%gT?zuEH2M zO5#`Y56IS0J67^QAEC7{8|LQV2EfW7z>gW@6F`3UQHgrR2Rc~_t@h6Ztj;b^{MxH< z=-GyeAd73unFjRp0_)Fo5D?!qkiod9^1&+dGwN6PC2U2qhIE4O7l<2orQsZUms0NS z)8cE`-ym4*0n;Q)1<)Q{*{i!xxBya`Wq#6)QCmU5SAgDX6>3=Mf5+-^72~^S+p=D5 zcNsg4-`#AK&k6h@M#aOJ>oTm>*3n9tJ<*O=8|WtndFMqWUM{HcUiQ*m_#?N{daQPo zadm--mqZxBH(tmxhN6Tg$HfkzDh^j<&tN1><9k6HLV=#2-&QI@JdbYHvtEUaZ%&Moqpm`?j@ zxMC=t+owr4t`cR}bkJD#>=^ztkavJ)|BOgY^}V>qQ#|ioD$9BVX(A4!j1rMR~Viz#T`mw6C79wzmWg)}Z)*8J`k{U(Ue zSk~ZWk%^vPQB!GQ`=zbeJLFI5;H3H)s>0Rx!+g45+gH=iSKl7AAQNHXU$hVkmKl(N zYi>F|f{!!vV5NU*hX`;omxR)@Yd7VE3dj^#9O%i zYryGN))sN<_z*gEe?I7{eL&0%!TWBxaE{@sk4l`F;->uQ5n!-|g8=L3`&78*heVyR zBVk4oUB;mG2<6LZOu1qrLRYbX7$c2Aq`TTfZ(xb}t5r9@0`3bAy>PLo9v8oK>q{Jd zo9))kDzV0X{8{(i+@5$&oMt>9aW_M&p=`rVY9+&H90QKbM_4?spqXhJ1zF*<5Y-nk zy$wkZrEILNLHN3UyVDao)P4S|jj%m+{z>htLJOMU@Y)ut>}5-fsDS-mX~5E>a+*%z zXX7|`UxjrKrw*hHWW1Fno}@H+gnZfjiql}~Qn{J-W4t0^ts68~o?hCTlF`Oktp_$d zmE^j%O4OObjWsOTyG*uoY%pp)ZDK{1jCBH5e`B4^fkiX)q&}d`km`oj* zYeK~OX~FyH6-61_Mbq%;&#yE%3KeGkGBIk8R2E*tRkglu_)o}!f>i|T424rSzO9&$ zyZJ%~Ee^M&9c~Xt3fCM?FhpkLOUKi&@OFLKC8ujN!y{N+j(kvcKU9uB`Z;`3JMGQ# 
zC(&CXQgDo7ve3xUH(`wXWJJNF12Xw8?N@`j6z$1!W^Y&UAJk=4KkWA72)ela1p=P` zsyDAzw`c7L!;5@Bni)v|r_d|AA4QTt{6eKeG|D5FN05>9;4m(~7_HH(o0i|l5&lI7 zzNni(C7hh<%^Uc{d+Sx_y|?DSwyT>BM=6-gP_3)b`exp2@_U2|1F4a;< zk;t%z%T|bY`G&?LE9EQM55MU95X9o}ah6k(=k2mbRzJsaL2i$i^SB$yBd_wcVw#ZN z7~RkKN`SN z8}E~UfUvScv)GiwF4{p0x@oD7{|Z#k%$AYa5yRUP!aL7Mtn#>!mr6~1+-G2rLM4m=Le+ z+Sbx$RT8j?_iY7RV~A0a)J{}n-^bjMj7{C=xA}v=luxsc-Rd>IM_0OudTUo_u2Y?8 z0+U~x%|9+w-s&Rj_F`@H@*58y^-W?+Twx7lo39fSBdcqlE-djv{qC|p0M@_nSqSKs zeZ(^eQWliFF|6r6$o^vRO|%4G#UCXXafew-!Gmn7!hFz{DnfJWm%Hf?)9f&AAio{7 z6x}_iKt+^OH`buUwcwIL63&aF^clnDZ{))C-DqTmPY0N`0kWQh$S^u0m)#ybT`?~yTW=b3CMwb4% zyB5(fOf#xtzuM>K7K|CVNiX~9AXD>?NSB7IX`f2>Ik-h;4d|>tu&s$TC@5*0(WZBA zJ}0=d-lh%H&wjE{rBB`4D-_S&Uhw9{ht_8+Liq@@5h?dwDYJf!wIlOyP@pVghg-0C zIKBU&&m#YA=PMTR>d_d%$ielO7l2J(s;Q0RHAY+UNq~kZ z)aBCXyvn>#>#h|_rMWUB`p4q_i$dFp%Pmp(nv~P87{D?I9SZ#@<&j{%Q8OF2l7Q9L z19yQHk+QzI-PsiG(WTduZ5@g0*^P;MlJyifq#J>g{_j1y0?9MqsystP6TVi>PSli4 zr}kk}M5A9UlY-uLBN z2^0WZm(I<3n|7h{XBG<%Mw;6^#jSe5v>{Yjm0WyB(AH#t*=*$fXtFwQ z2M=F&60MVmE;CW#0d}ZuRryQlZCHWKChvGSWFBUYS)3ZZQo8;;A>M$i!}c8)1$0(v zh(v-(7$Z%ZqWL{Ep(T-Vnsxj6HLT}VFS6#$$Y{N6q?!UP?Kxvm_?gRH&7AkY6<%m0 z_5DiBLhHU$_T;s!yU2>=;I|Z?3Av&Jblnm{!Nh*X_bN}mW;nPxcox^og(rW0)T_@v zrxzFdMs?y)rgCOcZoG2e{Eb>-18AG8Kw5mgoRyy?Ao- zc#`doQ2~>qbd)m%5j;3s*|{niGkIz}Wj}Ya@@59T6!L2xc0)F5C+~O7_uVrQZW?9F zZj0_}IhvoRaHO_W6M5ank!rIw{HXXtD)BPn)7pSW-hG%7cL~0x?)C-4&r~82ezBP! zuN?W@*=VP+Fr*Pvc|IpPYN9IaMxm<|R%EUao<+`bSU=aJIp;{{SA4lOPiwm)G{_6= zE>^ndr~ncenX)e`X4jj7BuQR36>usU#%99yBOJLpwY=q8F+0moFg7v{Eo?jxAuUy- z3=wv+!P0TK%-cL-ZWwF#+$>$BaGrbdL7Swd-P^foLU^eG0V`$aeZ;bInL}=R7%-F& zu&Z%cwlVO>jeE=;kKg1_O(orlNp_Z`s{%xCB)7PiAi$VFG+~FS!}v#&)XXjC4o1Aex5hxfyH>3T(XbqRX7fWGgoNG zHf*~vEi(*uw!H(o?+qVRS|gaE?h2->7Z6;B1`rx)y3>TJcpys(_s3~BdDkkPc0;)1 z*t=6)0r#ezuTI`Jn-rpY710`b)3jrkNcp8!3B~ih!qUs_>gsRbGn*VwV|us7 zrr5YLAz(8ZEtM!la(o8n?K4xc<@0pa51ngaLry6CvN&lurl8$Bp*X^x+lJdrs~nQAOQiwzw_!wTtz}2nFc~r=j|`KkBaU*w`xPYPNN)!f;8YVxLEv zYUOoIn|6KHB_xO6j&Mp0$7@Mx?uV!}gF%v_E{XchHw$-ZrDjsNyu~|@*0RU?JB=6` zaBr#1U6pmY&}tKkuaL3q)Gwo&65rH1H04Zcei&om?e6%HIg!SOHcPByt)pkBP9bDs zQe*xrYFj6djVco(no7!LE5?P{8-e+)?62mmaNM~P?O5#^Sf_&UsY+QS6NSFT%%?m6 zXjB)*CmE2-Op@wcvd@@dnG3%ZuX)AC=iog_7ddE9oZN_8Eo+bbc`wYIYl65;2~23A zEJj$J<1(!?3{z`*2?}$U3_Y5czQDX9edqCg;w6qzNfp7|{<$X19E)6nNfs*VO$B9c zx^$XURBgTOQ9}g9=e{^iW!6g`*}#Lx2hXo(=XodQ`NuC481}He2UZ^>6t*s39mC#s z{))bRpZnXXb;AtI2%uQym)N%ZMh4 zs1;Y5m2xu1{wurjT&D3mWcvbBFD3Q>Xo8IdcryEW=rm5LRr^hxH1-9=tw8x62bpcj z36|T|kQp}L`gL|z+E2A#>E5+0A#?^)^w#B>ia|ts(|eN2bqKc&b33_VGo4{F?1qk4J*Z2|?O5`WENy;0j@9NwkeET%XnTfq_tFe}P zOTS^jO_F$Gj}$GR>@lq;7X8)HcrlH^`v;r5MMoO2PoojhMnYCX7%*7H)JE2*SL{ug zT605BC|c1&>R7{=P@C7Y&RPD%js>lUXcymZU=}dMQIEhA0BMS|AG~dDlKLx&C?c%7X~Xht2c!`L}+B~E)Fb`{}qsHeeZF*{D$Q{;Ma-Bj`JTfdgIn8^e$ zIbe}YrQ%ltEbs&6`(0QJeql?Ahnvf}hnd4@A9de-ub?y&3dI}(k|39||gd3VXQ zjUfe|n8tg?_e$;sq9w{38sf({vt3-;Nq>O1Vqb-GG_6;cNyZK?HAIvn+^(~3ZbduG zYRkMr;T#Sdyi5DiG^Q1D7Zx+ZKR7V75OF;#1fPPhUs}=<-3WTfh_y7PHX!|?p*&1T zM*`pxVmp=;=L~($Y%Hq8;8rou(QTPvLoH}rwyLW_$%N{l#=BEA+!fJS|JAI-$rw-Uc%wa~bMkm2qv?Gqa-hTC?&gZlQU&X_i4;T!$0>VTYa+aRGky~d%M^{zeiweKb3nf5_drdk^RMmyn}k*x0GuAjeu3;CoMvp8mm+!H>89% zhMqi)$0V{H$3(Fi!>&-9;`qG1WDvrSHTTqu_$A;v3CbUDH&vxjG1&vg;`QEQZ7vNq zZ4N~AW8(-nG#NyD0mw$w^B=0{(>#$?x#8(+1dL*Te$gP|LIo&7ScF=cXtC zKOt&}mYI(~sMiwiU2l&On3`(YBkJ*v_^F#0YmtGLUh=4H&Kv}(a=c%SjZcxjg}VR z=O9MjQvzrK4j?ELY-8rY;b0810YbpGMvh=B6A*;m1PIl1}5W4Z?lynYYl z=zn9&b)H!< zq}jJ4|4Udg@}ur%dK?t=?+|60;NliCk?2A~W4VTU(=z-s*shM&z$H+x@3)X~wtjKT zfjcALURU|2h1Yz5XDqycw9wH31o>gnhQpEZzqJSjl?3gFmlmD?wVVVNBtb}9T#y$o z669p0z>j~E0y$VhZSB~>HsJ44`67Y;jf+$jegGdo?UK%`Gqb}u1&ok*hx`k$jh!R( 
z$7D%lWlUcFJNf^_JaG9lq>ef8&S0XVoM0k-0sQIM3cBszk>Mm*QIBDfgX9!)3S99V zvK80?$_{n;Gi0w=*nJP5b0K@8$4NT8Ozg4OMWRFgg(2@812Rz@ekuMRlH!Enc+Th5 zAwOjULc$7Z+7HEzz&0kxBs4|_KNJE4+d%)BR6I)d_m1gKq~bV5lLD{*Pc;9^{L4mZ zWYU`=ujBiFqvA2?!*f}EHUe%V)AWap=B_3X+aIxYqvt=Q_MfqJ9{KVqvk(#i=a7L` zR<_397m$BM!eV`PQ6fG=GI5T?*w)(G*5(Jn*Dt?i#d(6`MfYSNB)HMf>hqJp7-$DH z218wco9QH;kC9Kp;COvK87c69b4=g;@yprA0sS{Aa}0GppwFtcc8i44hH?qP5R#p zV&FM~v*|&Nabl1a*v9fF^7en)`Tk4~><4Ef_w+u2kTCxf*%oN>BaUwW-jvTf`O+yL zASCk70{=hwnC=vQ3LFLVmn6p%|KDAFUZ=C>_@AXa?cv{)%k-Rw2~U}WkQn}jZw+=Z zW-|l*Ts=^J&ll$Z5&Q<$iB`x`2Ly3CiOmbY2Y&fn1f7jd$6w$MwpPHOHH_i!A)gPS zvuRa*4*CBfhR&`N7qL%t`7!nXJ&4Xrb=IKhb9|?x=re}K6FoZQG?+{4|=3(H;v}l4GBsY_oTV`N7mCxq z{tbT|TS0NoinH5X=`RXrWDWhp(I&q)_w#Gi*?r;oDEXss{SJIuoBy(uIlIlN{w4nZ zP|2Lv>8v?JXX#G+;lGp%^Hg3SBv=Rl04t#DujS$&X!HUu&dY;zv=s6>z3QdFZT|%Q zf&EX@><#iLeH=q4y*Y>YZ~N@>v}JpGWMzuGk4=UVXdaQm0NkslS}$x_zZXcs|*0st%`uhZF?0`EEv zdwlKyA&d`5TKOa*Z&-@jaTEZ0apk#*Jq)xB*DR60`Q!_`FJ|GCt3fWfz{_^9o zA%DYiEc*FWJ-hn`AzMwq^g$LvwvG_vlS=3x#Np|l+37=C3Vi&p34S!1z{a-UeexS8 zs1u)@ZjYU1lAk+e%74+3JKcQvt9IM5n8yPl(Q=0AA2r`lP{q+mkpBLw_8Y?wd;l#h z1x`7SH0E?O4(DfBqaecsmZg;==0AjT{{FNDf7MR=iI21(6AAyD&9w{2GYbH43wgVb MY)P&lp9lc_9| None: """ The purpose of this integration test is to test the endpoints @@ -66,7 +72,7 @@ def test_lifecycle__nominal( res = client.put( f"/v1/studies/{study_id}/upgrade", headers=user_headers, - params={"target_version": 860}, + params={"target_version": study_version}, ) res.raise_for_status() task_id = res.json() @@ -99,10 +105,9 @@ def test_lifecycle__nominal( area_id = transform_name_to_id("FR") siemens_battery = "Siemens Battery" - # Un attempt to create a short-term storage without name + # An attempt to create a short-term storage without name # should raise a validation error (other properties are optional). - # Un attempt to create a short-term storage with an empty name - # or an invalid name should also raise a validation error. 
+    # The same goes for an empty or invalid name.
     attempts = [{}, {"name": ""}, {"name": "!??"}]
     for attempt in attempts:
         res = client.post(
@@ -122,6 +127,8 @@
         "withdrawalNominalCapacity": 1350,
         "reservoirCapacity": 1500,
     }
+    if study_version < 880:
+        del siemens_properties["enabled"]  # only exists since v8.8
     res = client.post(
         f"/v1/studies/{study_id}/areas/{area_id}/storages",
         headers=user_headers,
@@ -130,7 +137,7 @@
     assert res.status_code == 200, res.json()
     siemens_battery_id = res.json()["id"]
     assert siemens_battery_id == transform_name_to_id(siemens_battery)
-    siemens_config = {**siemens_properties, "id": siemens_battery_id}
+    siemens_config = {**siemens_properties, "id": siemens_battery_id, "enabled": True}
     assert res.json() == siemens_config
 
     # reading the properties of a short-term storage
@@ -517,6 +524,20 @@
         assert siemens_battery.lower() in description
         assert obj["exception"] == "DuplicateSTStorage"
 
+    # Cannot specify the field 'enabled' before v8.8
+    properties = {"enabled": False, "name": "fake_name", "group": "Battery"}
+    res = client.post(
+        f"/v1/studies/{study_id}/areas/{area_id}/storages",
+        headers=user_headers,
+        json=properties,
+    )
+    if study_version < 880:
+        assert res.status_code == 422
+        assert res.json()["exception"] == "ValidationError"
+    else:
+        assert res.status_code == 200
+        assert res.json()["enabled"] is False
+
 @pytest.mark.parametrize("study_type", ["raw", "variant"])
 def test__default_values(self, client: TestClient, user_access_token: str, study_type: str) -> None:
     """
diff --git a/tests/storage/business/test_study_version_upgrader.py b/tests/storage/business/test_study_version_upgrader.py
index 2d85c7f718..efe1e75315 100644
--- a/tests/storage/business/test_study_version_upgrader.py
+++ b/tests/storage/business/test_study_version_upgrader.py
@@ -31,7 +31,7 @@ def test_end_to_end_upgrades(tmp_path: Path):
     old_areas_values = get_old_area_values(study_dir)
     old_binding_constraint_values = get_old_binding_constraint_values(study_dir)
     # Only checks if the study_upgrader can go from the first supported version to the last one
-    target_version = "870"
+    target_version = "880"
     upgrade_study(study_dir, target_version)
     assert_study_antares_file_is_updated(study_dir, target_version)
     assert_settings_are_updated(study_dir, old_values)
@@ -235,9 +235,23 @@ def are_same_dir(dir1, dir2) -> bool:
     dirs_cmp = filecmp.dircmp(dir1, dir2)
     if len(dirs_cmp.left_only) > 0 or len(dirs_cmp.right_only) > 0 or len(dirs_cmp.funny_files) > 0:
         return False
+    path_dir1 = Path(dir1)
+    path_dir2 = Path(dir2)
+    # compare file contents while ignoring newline characters (to avoid spurious failures on Windows)
+    for common_file in dirs_cmp.common_files:
+        file_1 = path_dir1 / common_file
+        file_2 = path_dir2 / common_file
+        # ignore study.ico (binary file)
+        if common_file == "study.ico":
+            continue
+        with open(file_1, "r", encoding="utf-8") as f1:
+            with open(file_2, "r", encoding="utf-8") as f2:
+                content_1 = f1.read().splitlines(keepends=False)
+                content_2 = f2.read().splitlines(keepends=False)
+                if content_1 != content_2:
+                    return False
+    # iterate through common dirs recursively
     for common_dir in dirs_cmp.common_dirs:
-        path_dir1 = Path(dir1)
-        path_dir2 = Path(dir2)
         path_common_dir = Path(common_dir)
         new_dir1 = path_dir1 / path_common_dir
         new_dir2 = path_dir2 / path_common_dir
diff --git a/tests/storage/study_upgrader/test_upgrade_880.py b/tests/storage/study_upgrader/test_upgrade_880.py
new file mode 100644
index 0000000000000000000000000000000000000000..36dfa7dcc6
--- /dev/null +++ b/tests/storage/study_upgrader/test_upgrade_880.py @@ -0,0 +1,17 @@ +from antarest.study.storage.study_upgrader import upgrade_880 +from tests.storage.business.test_study_version_upgrader import are_same_dir +from tests.storage.study_upgrader.conftest import StudyAssets + + +def test_nominal_case(study_assets: StudyAssets): + """ + Check that short term storages are correctly modified + """ + + # upgrade the study + upgrade_880(study_assets.study_dir) + + # compare st-storage folders (st-storage) + actual_input_path = study_assets.study_dir / "input" / "st-storage" + expected_input_path = study_assets.expected_dir / "input" / "st-storage" + assert are_same_dir(actual_input_path, expected_input_path) diff --git a/tests/storage/study_upgrader/upgrade_870/empty_binding_constraints/little_study_860.expected.zip b/tests/storage/study_upgrader/upgrade_870/empty_binding_constraints/little_study_860.expected.zip index 11b6e564d44485ae041f46ff53cdb41fd95a1164..f78b95bcfedb0833d8a5655dcd413a8f34d1c7b4 100644 GIT binary patch delta 6539 zcmZ`-2UwKH*53IR1Q%QuSX6cq7XpHa6vc8)jG#s%7Hn9e0`{)>$DSf;Od*kW3`eDi zQH)g41+mB25=#)1{KjjEL`~G#P3}!q@0pp8-Npa@K9A4Gyr<5YbIzREs#`9rZ@EBP zyOtQ1!73nLYs@yC1xtoIyy@(V6LMG#Ja8QZ^PGI)47&{-hq-6}tCH9fRh|vjc}@!B zIFxCvL}(A2nuRwifc>s%Lbk$1$P$k)JZ!cVe40&!uo45j*t`mcXbliMLI=wxc;nLD zED#!Wt}rBd5e%K~1pk|~48|udY^Bv`Ts7V1rjKc!l3ut%qtTSp|40qS)sjH_32rMQ zX3m?L_|ZZAktoCJ-`+@%C{I)--ROA1kY0JRrtggeyj))!!g7b~?ltc1?IA%Q#a!8T zJmls#(_)q`d4FAM^Oy?-Yl`koJ(E1P``s+J=-p=uoJxJ>WMmI7>$&6icE5baru5#D zki7TT;bmi=G%Oo3NgH4`Z|!>RT;1pk-3Q+O{#?3~m+7zz3|l`GECEJq@rFwZ+}=D5 zDmT9fSw-@jmOl+<6n2C3t+T|pqWCpvKg>XXZ-#_VI>EtB>&3UefPYUx-j>eLe)Ago z?h$|c7Oa7X0ebi&uO+Uy#JWPmh;V4JDH_@r4|XJY!jLV4Vdwf^p)k(?LrPrm?*$9{J2A69LkfhbY+O?gbA;4g*%D(L^ z!1=3u(DV(9tX(APspE_K#_BThoj7RbuWt${u&a^Fn;)eMpr2}N2rR-m{P@}AazlHM~(RANfriA zheyC)i3XeMzZck5uQ*^+J;gz-8SfWTWUGc6ZTjb1|BwEe4r)_WZx;bMiDjo*GOpcmPOex>Uqky6J~s zo?;;)BYG`d4AMnUd(c;>W{TbZV`&RqdxpAD(b(43$fZ#V^jy{zBA48S3pGY-=FT-* z_^Ww$p1qK)FSPd8xjI35o)N2xm=lcj@PVasd%#eS1JK|2CGQ7TcS8p!*c)Rc&_B0; z1vQ5DEuU!+XgVW!uKPsVc)()*gb0E7S`q7!lj96qjk85#x$YAP%gtWaRxSV1a)r|5 zkCtap8)SgRfxn1CZm~%RPXp|EIn=`oQiAnR@WMK4ZIBxiOdH$zLvql_#={}kyr3w+ z9hYv9{#n}1BBc?yVjJ^}fB{ z*hzLDQ&Y?kv7ozJHd15KL0ftVjZIS|WqMn%mn2s`8)7G!p+lm*&`GOfLh#nXRllzm z`|D~U&Qps}`j_#Unro!Zo*ZtY^bnTf4#A@qQe}GJ{?DoZ6^#wVmPc4i)E%`IW4*O@ zX1F3M(pYnh--&oLgKgp$vGs0E34>cYcdKQO?mTR+eH|i7+%Y$km0-(XSRh@A8jQ|h z;TW68-0|u?X7FsLu@EZ%J&8N0(PX!2kEzH^=vT+|TE;Zl6C*jG6a1L`nCX>)3_Y`1 z9M*rsoG>DXnUs8i_+WAl>w!^+nJXU5Cb)~j8GZ16HhT%93W<9Zhx=#+3OY)?JRH-8KCk*fGfCRiEC?+*EL3UX zB7yy`F}WWzgVeiC1uI-S5g4XayGo{q%|Zp!;C0NTUC>+ue$BcFU|cRUv1_v zp9v;NJuORG^~w}?+dLD>=NX-PnoQGLqgnQ+;iV5Rvgg0Kbu3I9-a-TO){BER8Co^T zfyFKGhx2T*a$0r1pjbG=wvEAJ=dKqh;MW}}*!u!YQ09B!#7lJO+b^(q z<*-=zgGYWK#%^UE*#8RiRZIrX;*V)p*dXk8h1Be~Bdurv3dV;O%u_Ot_p~v$g0y^T zdq4vTJy9u7E;4VLk{l?7UQ$L-DvBK_y}WIdSMdxt-QbNsTw)XDqSVM2;ZR>7Wdz4U z9|`_8OM)lAC$X1X3PteQ77v=b90>om5oB5k?#fnnZr=j2JwHPyhrm~_oYtDNOv$Ze z?$1$f2}&f*ZU0qpmM<5Kq$y5%G3h+Hch?u+KF>yos?uIIX&|n|$mhCvrK1_Eeqer( z>d_qM{=njtH3CRg{*VNG`5GIk&Gpm3xe{Njyv7D;@5==Y*d8uUUjHK%!K5~V0}c}S z1y#zkHaPSKo2DJ@&v7$;dxO-RmJ8TZ8YU!-cYYz9|vk>Sk}hVRS6G36FzvrmBJTk#Vcp?z0f-2aJ1zhptoGayuhhuXj2*D&{2fCVj=}pnel_$ zROO->3Vlxm{E8G`5X5=>VBC&&61XM+nhbwtk-6ttpfVyD!|T`>g|cGjIjWnyIy$8L z>f)8JSx?zePuEf8u6K|;f7cOAn=|5pv(#=bM041Jch8b&Oeekw#tHRolrp)qTwJUt z3+rO|A_T{sqmn++g)b&!+*#85TUWmD0n^F|hfRuRKe;<6dE?%%S(t+-)e?r2+1(r@ zgf}HGO9>jcM=Q>~Y>O6X_?EeF>mFG04SPk2iET=opiGSu2oz^UVd{^RR3G$~z#$Cc z{lkPUZqa@(s;I~MC~lkv!!kPEfqgV^DIr2$Uq#i#x2_Mea+ajr=z|l>*+6`DiaIY= 
z5aq?b95LhJef;`3L2%lBg3qaNMI|E#zM%Ma;M`+gk=**@tP8JC+*3~I2^HOVjJ=BP zm0W8-!AE0hr0`4J1=fDZb~G`r>(3ENfrCwq7pd^uCz}`#c~x-3(*?5nrSXp?idA1U z8rKbwd>s=VjISDian)q%VId3Urx&RFQzGzibOc&XQ0&CU=f`utfew5Aw?NEz>XhT2 zrwo+(vOa4x`%e|AIL&%t5szf%AqvH?LAZG*OGa@Hmj+=~HJw9;!Dy(VzE97M1ZCh5 zzL=}bPLLqIIHIwrh-&qI37zJLLp4xOxm;ArLbVrY@u*)L{`g4H9vRCQy+uFiRZ0z^ ze3GC-k1oL?>;_MV#`+{KcQyv!VH1=eXX86}*fgc*9K3sn^|RYjU=GgN&c-PXb1Y4>5`zdTqGA2B{cHzsYr=lhU1P>J-x9EvrkjSo;pfpcSbBs7j8-#|9)X;?CKU6+?0I7zv-jb5^@^m!Z`fUNzpc}$3Tt5_s-;*fP zt4KBcMCI#{7%f`;;uT!Z&z4n2OgK*d)k(N9wZfFCX@U=)jtQ0e#dhyW)Sg+w>iCsX zr^qa^-LC~Qjwh8#8Mum*eV{OvrWWc9+ViV4U|3@0PpNoq$ZEc5i?hC>{21_#TwMB! zb(9^e{s^UI*;kZRv)6E1Uwg}XE?3sRi#zX9x0`yGj`XrvOjBasGt?0k>-VW#ldoy3YqTWn+~CV&RS7KcETo zWG-#OD_sLmn@95#4^@)VvByT1=-G6N`<+-e>009ur05L(kT2romG?6Z_u3MHw8gS} zG-ZW9W}U@7qD}vZGqyqU-zCSLzWk0nwVniik`&s9w*&vBwSphwTZoZbkEI)kw_Aop zXH=`a^m5fJ#!MVq$Xe0sRowF?dJ;O}y<(uKFz7Kij~ZcoKAnC@ z78mvtXjVR<;*C*WK%-Juhgz>*`Ajn6(>!`8&;=Q&^!S*I`a{$LX%}BEAaB+QGy*Do z0%hTl8F!2=B+^qsO0t#xjE)}IPp^o+n<)_!!~d;r}$y7%@np`bz#}U^sp~LWRwo; zwlagEbCUqMeIlheBUXIE5|s=)(pxm8=cZ4uEp`d81d2R%2*El@hP)R<2y|G#h2Amu zRWv!s3;nm!cto!FD`U0_RrJe%7y1>G?W+>N%wp=tq9MMvl{P)SxRLKA(a`Myzb7dE zxO@ZMXJWeRvyBw@-^K+ZAf=ss=l;r0fv#4%e=4cpg@nsCB#a!ED7t-0JyFb3Vv6Xu zT>}@B9i|9vCLx~Fylt5)ZuP6%F>60vtd_jSi?wTsM4lTd&Is7?W*t!~8JkWMpR1^S zn&?_4_?k`=m1P)ph?zw?H+Fn=rg1J8r02(qJOPgo7mJu?0xP77rUONmL>&eS=Ss=f zWT1Gfk~`(gfAL}xWEp3lDzt-k@}uZ2l0uFa=XYY|=QLWh+lBfgG+NM4CHAIoQu2JU#zfdzv+O5I+8LWe?dmB8PH<4^n ztAPhK;j+*zkEmIVti_ieQLh#+f_sFp6X^Pw#tP#>tba^**arvHrc?cx*ry6~2(Bhh oZG8sAqDO3i{7K_1_aGsYVL>LnfK0zKciqj&8cnk-Mn3}l2NNfgJOBUy delta 6433 zcmZu#d03Q3(w~0c;c&oVkP&4V0TGZ00vbhQ6w!c+sGtTTCMt=mQ4z&E9*Gm8(TxX# zC6u?It`P+U1~rKnCO&FBqHd1;)Trdp^~fq7$&*diANy&@jx?j` z3e&@NKTnuZ=8XwgSOPTe@CI8;6I2yXax|92OLB9`x&I=E`eI+GSQ0H%RuzY!{Tgcp zM>Zvrrf|MUcFZn1uL^Z$iL}D7Yvf3NNj{ebafPj*euo#tl-(7c+JcPmd09A4Yhaxq zvz-+V?J_~+_Q$YzNi$2$c)fu|kfpOUHa%SVFa+#7Tj7oGn^@xrxb|PHa4;v6yd7U{^Fdq#Cw=TaOhrx{>h<^;#BHx78i>FjPe z|2XaYOepepoX)=M4V!b#kh9wWGhX^0Y(aWxNSzC&^k7(>ngL-0Lm@w9K7^&wcR(5` zG=XiO5$>i2pyd>WBq22pJ|6I<#u$wGhgck3jEEHyhP}fjlo0o1H%@;%`Z+n9keUQ< z4q1bZ2Pl)JX&Fr-Oh=nUOyve({b5RPY05xhg>gu>u)vZY09$OML_Qj84DjcWN!Yud zMZ&pdryygg8Io%aj*Ov+4EIX(s#B9E1o;G@6^*>`#rlx{iepWx!mhvUFiwWbu-s=8!$hx;J}O>e|GwG=JHzq z2{GTM?HqHwcwO|tD^Ck%)E;{^IjZi~lcIFL)uVp%xzK6l%B9oBuDde%wV=5kJ&n4| zbT9A7pw$K&*;oNT`n(GlLp^hQb zeHpCUZ-%@`17t0@DI}j;G!Mou$|Xs9NLmO2t6c5_Ll-+8*25B;)#-E+yk1ZU_9a$q zIITH-Zjn_uJeI!FSuV*6@o&#}lje_pXIU3I>Cse*QlwVWal%sjl9x4mtinq!O?oJ$ zaS!=PleYPT^pXpjBRwsc4IeK1i5yLYXmb>tUv|=wu{7N2@IliH9Tf1-l2_ zi7`1^_Q+u{Hqu0?@_{ds40ykWwZy<`QRHR_?i<9BR=B*H#nKV+f#k&V@NM@P$Ij=c zsBordkfUAHIS<&EBpZKae;@p|mIsBpYSMBc*6~HsaTPxCHaX7pyyF3@(oEuxsO~9#MQY5N6q_-R4Cnlf?uYNjt9#b9QF3aTWi>CesMa!{mAf5QDCLK z9}jHfUz}LT%JK3=X2wN!W@I}2z|KZ0DLTAiXQP!hIxJep0&qwXi$i-E)8iX!nMJuF zVVI>xW9CL?!Yx4P7AA>lFi66MMJzYwuO zX}|AU=pg=sE|{>1S!nvu7b@K7`7EgNNY3q>hZ{RW>D?g&Um{KyzJqW3nSVtGmH9* zYXeVSrkFKcp&~I}CTU&FQrdWXqV$97P+GLcuq$k`I-xb+&2UQc#d%kW=e$5WW8GCL zDqQuoaJtSgs3h7YMZGQ1U^;CPm~(|itNnw8GUu4pWT7Uv!nHk!n;5R%3w1H@!7Bmx zeMXk^$S9}6u;zQ#hDNV&S9BChdyZOh(RDUNnWHVnH83OGBf9FH%z`1?iTjFcE1B1* z9z$?-{78mW3OEi2{z#79{*Luix8qyRvttuK{JtxOpA}y$y+M6o4YzJ}({>V1Y$vsK zv<|ALbrlt{=mb^k8_}3>f{tG036`O}9m^LJmGub%gh6v71)JjNhX1K&ud2H4I=GSD z4kvA4-LcPcs?EE7BtoNThWgb2zUVJ%h$c}wg*WLG9?igk2iPl$S0b{pYMFM&2jJpR=&xC>zh`CZn=s6eb^bSFL_CN(b+z50Kubzrq(5 z=$!8E?Z*m!`IH)Le6w?qHI`EaLx`_e^Vu5P7qbXV zTu)U!Vw{sEL0053B=l?(O3xUdD7iC;@1%@((i?C!(%Io{!c{;=#^dgrR6w3N$ZnBS zqjK=|Tki2xDg_cODk$tOi(#*GieP9~K#E;Mn-!1}^@gN%7Lc-40?!suU(7c9*PQmK?6z(lk(DZ1sZ46r_*#WY(YHl_0h_}Jbd>IUGEhO!m(i&0qX@|!Bsn% 
z4}_Kn$yR^%6e($xTkw0Tb_U0BbkcM#!9s7_j$kN`BeZ`OrqNl8$YKGFD%NO$?5)r z!jO|+GA&fZZL}k)7BT{DzTAq2WS(&(P=LKvLSFgDAgx<(GoY+Dy z5%)-J($JW6_edD#3R=8>meB>tTdlC7Ov3a~aSw%Mo)o;v)=Bp6rrB`&JS_NvjaMJf z)4_^mDKgZQe~a4sEsliJVDHMkMeVUbpm=K=>+DLtlD!Z!s@Q0C$3h(p?`p=QRdnM% zS%|iMY=j&!lWIxk9h=;qQA;V|Q=S;h)T{yyhw?|zV6{dpGNmQQYKW`hJ-!`c5Oej&APTFTKx^D6Fny{&;sYi^2O9edeeHW+QCLPjkxN{4Z5{{ZDDUq zIgLHszyrdub{MmfuH2ST1?Y$N z%`8ZX-^^*J;`&lD<)T=GKtdZo{xa;2TelIqVhbk;fvVOyKl1IAt%BC}x(?%q3jtvs2P72ne9ZYNVTl>)H;N zK+jjr<($8{=O&zPa%w>Z z@zz+`oDR8x=d2MtP0>og?j}l1-o-{K%UuAhZ%30YeCOI} zOh(Qg-qkg9YKQ0C1dpl`66j4I%M;-%rye2vOcl^hEMvu?fBqg*4^FCuV>d~A< zzYs_&b#9Fg_PpPX--ZU4G3p6|hc6EX^Kx+*4j*m<=ZKkUqsKF%u96C z7YQVsKW0a$PsAb&(pE&ck&JY3rZ5Ow9e|~mSU>d~0qn2tQex^j63$<K)E&c#m?Js*>8D-Y`M zUu#$&-Xu-v^@twu1`L11CaR+k)2sMMBGx~mS7o_anDNg?>;(+86Qg+zD{_|V&y56J gTEk$`0r9#q;dYYEYuva9f=<`N&dALF2iaR^t^fc4 diff --git a/tests/storage/study_upgrader/upgrade_880/nominal_case/little_study_870.expected.zip b/tests/storage/study_upgrader/upgrade_880/nominal_case/little_study_870.expected.zip new file mode 100644 index 0000000000000000000000000000000000000000..ddeb10ad18899a88440167951bdf193a96b530ba GIT binary patch literal 135401 zcmeEv1zgnI*7nfSAP7i@64IbFNGM$f(%sTSGaw+{2uKJB3IYn!Eg;<`4Z_gfIUw~7 zo{-}?$E)|Ad+)j5JN)<$=rB9hv-e(WKYOjU6=e_*@c{t91%Rt+po#;}qL?A{Urx{m z9s1ldv^BG{vSu;0G`-qZgC;DD<1zVe3+oW^9gB2p4JVIu&oje^9j(DrtC*qgZthmj zK16N_u}?&aw=mpZdf>|H6Wt0Ya6qx^^2mCaF&US<%_$xw-VzYeUy&VxsYl(^7#paDd-d zRHPITl?<&w5DoyK`QwVLtnExKOkH&COsy=>stF0HL91oMxwU=}_{ihQiHG~-+nrpC z0SERm%y{pM@4<+>ia49s5w?wtbZjR0Q3JSKgsCS#r#Qr{y}8^$ zr5bdPcP`>VAH^{de$8+@yWa$A;%#xwz{IO*Ef21T1{!O3 z`R-=XWE$+w_VFgu@=sMR(q1w}j^*4}D4@(p47B*h4cFYg@T>j$@ z+M9ls9!p2?>>I^gCANt-h>A@B0L-6A%KUMolxJ}_fjvH zJllkgjyk#x5JwTv=!kq76Bpsx+hha|JW2SDDfn|z_WB+;IsO7T&no(daB{Hz@51Q| z?cf_rgl2UpjA4HUCkN}-a9-z_dV~1Es8Qn=OrJl=gNus6ir~!k*gB6XU zm}o+Aee!46a(<0%_l@iM(Amm?AtzTBv2D8*G!^6_K%uK+=N+}?nrX$B66JrU$T~qjICOg zq(sisc=dXvNu(FH&BS-fLQ6JBb9NzS)vh#+}E-o#*wr>T+%^9E7$7_lhII zHE27)7y!VPUpu2`YH46K$X=?}6HMO*}W!2NyX4f{fIqMu}Hzxpr#p_O^31x7a z_XiEOXXxU#H6|=X9Y`r$)Fu1fj(5fcGSe#0BFUkYi@3(|Jfk;*&6w* zKttu*TUr$4nOfSrfihtWpn&#*>`_e_SuX@aj2m9-m*FW zwCo4JEBkCv11?;(q=d?X7zF^}`AR1GRu&dkmaO_vS?inHIkV~m4Gm1~EUXOd%|A=Z zR{P9+(`NrXsrsOkO1|#rN#)s&q>o9(49YJH-9KBh5P!F1{fl*fk@0`N?k|%5C+glQ z8+iH(Dr*+5^Gn)KGyUsPZK3OAZRKbPJSXne!87dzK&T=16~Waz9R{Ra?}W#f@S(o8 z9CCA-$hmm?nUy~8$Y6?v;8uUI;>CysHC=eT*!|*0hat6rOp+;pxO6yjtMxbZ{eB_v;awXSx*MLOKhH~ zvRB~Js`1OwVxfN#0l24wrimmA{R=4%1~#8;c-lu@^_X8a1!ptrpYQQ!kpEBh`18>E zAMCMxTZHCA=%{vw?xWAP(LY&ke>|#zhPH-42P@OBhjle224XdISS!?=WKx+o^1Si4 zyK~~!DV_Ml{*&%${N)@DSIkrKSy56g8GkPxe=pV7*t!AOnDz3WLVbRp6e*U(+)Ts@ zsYY-D{Qu-D|V#_%sK@w0T#mw09+`4kk#ob9uGp}fzfuX_D?j+|LWiD4pG zwUjuwA%aMRCOr>Io~N=KG2KhNfFW|NrdNj1mnGm`$v&%<(Us{BMHeL-_LgwSBfNOI z!#8?@A{XBC;0u@!1-ICR%XI+nOawrdteMurlzBIe(zA`G*WII9@aSDqtoLt0^njv| zo5sd8O^r2rHLo$&1MbjcDOt@O=w%K#e28wBn-J)|Ws;_n;`R+Wf7#CbG{dcdhSq0> z>SvRS)zH!qXzYAujhvUmnUzaz)Mm#B?LPzb`F0sUmH63UWTBU4rf&_KZEa_iVs7o| zVr*%C-6z+{dQG}R>dgAR2Q?nMIcI-k_I^DuzEv5=pEUT4;rZSMbFlsg8_fA94L%!b z-`il$f3?A9l|AYu52u7ec}8VYL!a}L`R5^_Zv_Mz{$!GW22)<4Lxx$pJz%!ALqSR@ zS4pzHjZvjekvzt{`Tzn~Y;GXAw*aQ;~@+Hg5`Y_I@;Zf+C+{jVd| z+|<(S*P--z5#xaJ{6Jj`L#P6>J;QW$fE~?)4F^!8MnQ9Mx{Tl@L~!J>N=Qt0UeBNh%-wyGJ!(w05!WZh`CA9b((LXbD*I zDCuIFh2f%T4E7(OFR6XE(HT<1?-DYnhKowW-hhb&HW*;j0Jbu~)#fGJ3rX#g{d zV0I$RLWb=P{%>rTT4h>Me|v)Rz1vn!*tQk6nTF8_FaiZepuh+e7=Z#KP+$ZKj6i`A zC@=yAMxek5lz%sY^80P;-GCnNIH=3ZJ}2Bc@9p25w*KtE`|D9Yhqi$!&`|%g)A(v{ z9wszk3_f>EKltc0AwZcYDj=?VN8?%`u7D>z8hx?{?Bzc%10e>L-;xvOQD*<+Gxgu2 
zeCDnX!};$#hQEpPmspH%&Sp;7Z2r$r&>vbCzlrj9&Sp++*lhkGq5l{D_%}EQKz;XO zdqhw=psV#8sPF6-zqsGbW~fW2wIR?Bs+-ST_f{3(S=O;(jP71K@px#>=eSB}fd1Gc zdLi%vGb%Nbn7j6C=VX1~P)(-_8tf};h)9z_o_NI|WBlcag&lS`cywZUhL2BsPwu}- z?tpW7Dx9h3EMF)b6>^LI_A;dx&DAKOrVHpP+?0S9(^e2Rr6=@RknUGu$sPasO6>{X=>k8Zdn3+6trBVZ0)YSA_A3 zFkTVHE5dk17_SK96=A$0j92`3^NKK!4wy#=%%el}pP<({e#wvVE4>aQwknYjF9^wq5l{D_%}HFLvbz>k14Bx5^_q=1G7IQ=$l>lJu}}0;QG;L67b3;Inh#r5KvG>4dl*78MOEwO)5U@nYStm)<|#o7Kw{k*qN; znsqBnrsr)bt7bWHjYvbhI+a1}%>AY-KuOjm_u4!nIl2oMEt#!!DJqiR8v~unZjXgT z>+;8HF^fTz>HK`WDx@a%LNeLg_2pIIjxOzj<7d@;Q*K9(NTbZbM;Rfbog6}lt1Z+F z3got#J=)P80<%}jhIaNAdBdY4 z+(Y2R!?Q)Vr4dA6o4D@5Zk;*#II~8_`s+kny;nGIQ$P#%|1E9$`$`OS?R3wI=lq;} zNQ|fsAcn|-q;IY3ZymZsj45U{O4s*Lx+U)MLC|Wrayw~PkD)8xc>b{@4trYPkEbQxc=R79fHo+(8APy z8fd1c>Lk>2E4+oNtv)j}PYpChimg8MC-avam1hY?MDO4ud{85#u{zDf!#9WTt!SK5 zX+gTwcvGr#*;Omdc6ZJhvN@h}`22)*qh?-Qr7LW{X}iF%30-UBF`)&V(7x+EQSF^b z4{V`N_q7X=_9s#l1|L^0^l~9lxUOK9eIS_d3aiW`Sh<~4%&803L|}CJT#20hn;D_PJUpAQLrOB*uf_3_!o8}4Li4oc{jj(Kwxeu zFy|eZixbQd4Cb2#^G*9d>6`X1GOR!OCc_w37{dxN4WWZ^~fJ0ZQh9}7NKw{E? z{R)!=-%qYWFx3ZBeK6GrQ++Vi2UC6jU#PzGw-0>Dn{R7nt_%E&t{*ttTb{pufcjU) z_>XG&;f(iZ#Y0Q{^#*~nTMm9=ZU3nF-=)0&n$M>F=Ewgm=l$1>_`*T|Q6s*Q^!}`% z?}g-_zk}dMAo(XQA^2Jv--6^jZXx&S z6;RjM@K3!BI>Ok|YK3ubAM)OorzFlTEW$FOyVy7;MH<)cJtp_K#IZ!C8X@&)y=MMY zLUwf#)h*XFi$%%SOpT79#cN794j(&<0vI6IkNg1NkA_cvd?hT`+U3v#z4ayu{oUA* zI^Vm4dHQ#i^%m#r=|wrEHvP7dhhv?Zj{+~EN^V}Av90n80JQqJ*w^&wD`Xz;RzXgG zx;E=`&%~iUL$f`dbcgn=9{PMo&%U@nSOf}UJyyhMf;@kWM?^-dNh;tO&z>f9q9eCR z^GbMQM`-HS)c#>9=BCMkZTi6WkaGG`JMM;0FhBm0;YARQk3qp`KpyQHuN2elP%qQw zn?&OQ_kQ;3bf&626JjZN8AftRSy7cscy_Drrp0H4QW;Nb`8;%v?)UlR2t8gv|8j2h z=e`%Ve}>PW`(B*CF_7zv+Wk_{&${;~Ck$3wJ8gDWML7`_W>d?DhWd6t-;1{T<))F~ zqtk={WuB;jxb7W|Yk{}|p73b@g)8%5Rl=kF=GwqBO?)0~?jHf$(bUew0I2JzYkqc@ z91Lxbzu}nv9opwDYS!;W+tkv?+{)4RE0kfkef?eT5&E_b#`B#hLm@T|2C`n@7D11aDSh9#sM?W{x)!a+X($8 z*xzTK{R}qDJp21Y{yVgP+&tr8g_&pnOXTP9SG4x`>1!zC05i}2ruNu3&9gJO&+F^& zg&Ss`{T;W{f26yAvxfgL+|XlFJ8ffK7np(e_t(~+(f=mgy#Hvg{ad(UCfeU0^xxrq z-b7>nZoFYe+W)eV#`oPg!_2h5>v`r6nQ7k-ILuJ{pET6Y&P%753?Vn5Gus9F5dJ#D z?Mw`T7P{u2Bg}2jV&#KaIvC}dugNe=OLahBa%4MMSpsIInWW`{x$oyHv2}BLSOqKJf1SwSA4&SlG5+~<2w$J1|D#g={1`ok z@{{R2bX=Xt0sw5k8(-EyLu;VbFNqm`9t)qh0>60Pt`4zdLER?AJ`iJhaY5db^3z-l z@g|jiCh_#~^?{P6&P}yr5b1o~r{j%^la;qc8F!8nh7`SslOzwMFPiHhkSN~=3s|G2 zTepUY?1Y%&JXEAb$DET#hXeG{XJLLFa|x#yd=M%+98%2TEpTkJp_`WZc;9U=%~5ypNe^^$&#Bt z*4hdGQ8!Q+^Is>DTyWF5dzk7JsMb5ql&~_BDcRzot z!rbit2i@%dU6E_Ba$(VDu;?>b^x3~N`mCx%=O;HssOfM4aHbofzx*0q!|^+V;w*#uT#_rRKB%d_ySY-zxndMjT!J)~CNW;OI-})*ndm2jY#sfyQ4KZuAW_{(@+u zZ=mrV!A9Re#&GwloE^{KWb!4?jr^_g8kvm2WO1j@C+8+-_l8(CXq#FpYS zq1jm8^!n5uDyH8+_o*{HGF%+>f*sP+MpM-Eu)$UG&;#1-w2aFrCqCjW-Y5uzQWUaC~ zef<^sN$0D4R%*lT*?=13XMuvt(Zdc5jA0Q%nF0)0-8Vb^8?K-y(Wz zri3e$b*+(2k$7Zn>N4Zj3qk+L8h9Q zkbA!MDQ@U4ieW2L*VKVp>^{LV=~ku2T`W*gPEZ^7_!pY+Op>wecakq60szLS007Z> zNg6|w-vV{bKW`9fSlrLxE7Y4d%{{vU=`HUPW`s4*&Qv$=>Z< zhFn66Vg=3+c;Q?Cevx2!dg@B1^^27Ln0{q4S8J~YF^=_`I_e`!%BLJBWB%qEMQ>s! 
zl7M$!kCL=FvI)Ao;+`^SD-l2Eic73Q8hC+JKD>^?jQ`HEY z&i%T_r1Yc_fyZ6!v^i{6Ie5Rl`-w zMg2kO&Kj!(5VbKTwc2t}^Z-v;Qz$*$NRWYluvDw7#mInR2lrW~+pTQrM~E@yB&p>S z%n-*^E%T7Dq6)NWJrlgE^;~^D=FZg_id+J2Y78x#=4Av?9Z z#zYoT?GPs>Ws{ud7@bFNDlg-`@M1{(6!p06%Am%I|HQ@{89CP_X07Ar_qj?lgBd2m zBKep4TM(6AamAb*C4F;{03|+o!VPpS;)kz#VLj#2yB7Ria>?&S+?y3xL zz4ZGZXBP)OhGT-@SC4KNrOiLg0peQ3%z(Eji{Bj3FVMW5xFM*lI`h0dR4m9t+8mX<)YnumT8^=o~COM{{xH~69kf|t=;?fIi{_N=RmjN9)Rzww$8$tCugHfy`e zMTqV*8H4=t@dsH|od&~?n*jn5^3L{@LO735NE{TYz|oQL!(_!*86-##uL*0&sZh1N zB)G_J^YBG%iOd=snM7O%e^(bWkf}H>qm8lblkb%o!M9aOA8@6+zxSP zTeqEq?YZF14$X_M<;YFCiMH-lzK=LpCmbs(_9=e5j+l;EP+gq@0HSx`0Ho*F5ul;1 zl|4`&nrhnC&fdV8#Z=#Fr4q^MzVaZKz^1pilV~^Mg*8RBWYLkS?kPmnHS#N^h$R!= z6Kgezl&JWQ^lE|8#gYunuL>1g4+bi`hB+l9A~veJ4o|TMZ+K&H78obFCB1^EwHW|8 z%HocW<#sr0`x^QU9_n$#QB&8C4}RG8TPVw*--Og^$I2#(%BFotsUOeW=Z{Of-Al57 z4R*g?V^BUi+;TtmQF=)a=hXHTW1&0bwCBEy;*H)hT#NjRYx68w6f;-NiRWm{NoMI*hzaRe;z_AV69^n2%~=VrP?WgG$O0f-1mpZAUMrQy zyW=1ewODHd2l+{-yU*Xzyyrs2+W<$?!mVqLj!DH1w%qnV9l1@UIj%(Xv0F2CxKU<^ zn1V93yF4F)mQFZpxSq%YTAv`67Y%wz`(heJUes07@r6QEW;X<#`vrwygdbASa%Wo| z7^Lfb1!Q$s!e*@W+7zhix-}jbcGop?idEK~gHff};Qw>Z>eKf*DLbUI!#a-4* zGSLIpS0}Huo37UmJbmv7*{9Ssr(xA~0BP%vP3Y*NfZwTT5Uy8Ny~xP*2EU^oP$E)m z^QkH*$aVHET^raJ)~z3WzQPG|pJQo%IM0fawbt+`X{bF6tS-C%NCtHee8YxZG)E!P zhwD{>w>^T}VgkI+^Q2O=UNu9*ac60CZ8dI+)e?_fX9M{WF>)6xsY!4s$4VuMuL17V zn^H7EA%_=!nQR%pD4=z2TOg`w0T5l2u@sF^86PJ*i-BoWvax&$M6q@(EJ;4aZ2n?B z0cZNEtY!3VB30$1&L9vn8r{4fy0PgP>W8Y@5s_i$^)XfPY7hn4emHnpX7H*#XkMAOzSM!w+S5_*0vU94$YSM9>2){P4Vpc$AfA^G0_w0g(%6wt zS>n+!2wSd8>=f%|0mNY4Li>S05~G<+PdNpu6Dv9B6(p^ror+Yho!Oiim$qKK482z! z<%04c0fA-DBP%QBmVsjK8Q=A&l_9YWi#GA&2Sm6ql4RF*_p zyA!(2yA9YYKc5s-QmkV$Q`pXKVF=_M>-0Hr4*HOcHQpoZ0I zfSiVRRT4iZZ|bA^2%)%s7W7_;;=xcIvn0)ku${+-77Gcg7DZ+XrgKbo^mI#<2j$_* zDug!=wk>4%pSZSN)3WW6i#C%#q&-Gfy^@`%U7E|(Q(=`QsBUbLeYig_<}Uwmi6v`k zp4t4BS(O|9qUb_T*Vs@M;qC^_@zT}$!EFu=l6)LK0uIcD9}v5X@sHZIbc_ReQDB7Y9sM zQA3#LgO}gs#-|&tCh8-2zp8W_{&ZYr&7IScb5Q(2w>APSBI{dFBo(^trf<<~t6{sM zUbQ#1mMyv*LEV`37Ly--kaRg`i9-0Wvoo7_htsYJmCvfKNQ$DmbHc0+tTGTZ!!V4g zPvXyqo0M5Pw>fpAscLv6^6}+?jq(%ia`d)C#E3^bmy8NGFL7bDiRW_>7MNW>j2`eM zudkXl%UN2McNx*k%-g2r*URi2Vf^$7J@ICttMWl*&FNSX^$Nkn2fwuv_*$jmk#Q@Q z7pfrVEWZP-X9GR=99QM)5aP!CZ;V6}*z{xvb83c>+I7_V+(W1F9>W8oF&_Hl7aceb zb24a@itG(L7FrFNbT=G5%b_Mij%UpCdUvoZ_OR?j)iF`0W5^}@p%KrM!ZbW}M+X$` z$&D;hyCf$z_3#|bc=?Jsqy?=oeDVISYr&S`PtA*Jdq%Dfk3bxvXpa}AA^D|y<(^u# zqoFSo8{cnFkqa4-`MT+^Y9*DkgD}hx%%l>JhaH7D=&%du4n|@gTrw!0yiq=9J|3TI zRpoGbvSuwmut?XcbD2piXYHwmxBP74uuHBwL=H`Nkn}J*2QwjEwxwl28_a!+M*%`) z-qYrgg{h7QeXnsOm*l}N;Vi1b$eWWYbGM-+#zUms8*{m4&+k<71*@P$B^+U?6(s)ueqr2r!tiay4HfKbD<0D>NqKrlDC9o#10L@Y|Op zZdMADBQAVa2O}R*&3Z<1Ke+p@G7ML^6puWEqkc{R*)iVhdFCl1s$Twn{|3>=WEq**MU1$G9fQnbF4{Z~Ey@!bW>TLq zV>Bm-OKe`fdR)}@407BInBOvEIdr_Xm`lHZg=_U{e*bl|$dd|%svR*ttNkf|b)qc& z9f55z(^W26$2$2!UDiA^`SL@Sx0pAD5X?AFBTruxrARuaUo$VdVHei2ay6@C6?uE) zN*mcIgrHpfj_cU<$d;?*RF6+5Z>r##(a7W8#{M*x&ExaQ3@hVdAmyo`gSCAxQay`r zKfyHN=8Q76FMIS6f4u@77p~hUhK>d&pG`2*=YQvkGNKJ>$xYaXd3gPTyg7p zqa!s#RZzCwb#rzhI0KE&5v$aVf?OD#wRXHKh70%iC3EQ}=tP>TeQz-#7W2;tt~d@6 zh99QRv!=?ThFq=0djU@Ru#Qi$mp;^BS1)I>{#b9_ei!n*8Uk)Tsif(ATuC#KFJ0vr z2vHe-m*WPabCUrXJ75`54U_ar(Nf*A+(*f?89CIfa;y(&pAW6PCm`asHpD;0+iS6! 
zc*I8Vky;xA_7;dG{~K0{Lnho8MG&#cS)?h7mTal zI$DWh!mjX$afPYB4E^;+5`&Nfk*D-Lh#gI(z+1~nC(18rGsss?rp@IhF7vhGK zM|}WpN%CQ)TumHC#Ai&v5)pRtOCUrZ>|P_=tjptZToF`@+I9g+$bNWHc`pI)_eGUaDIuPu!Sf5Q=pMg-y(N?uGK8f8-x70JiI|`;RL$ltl`f*{%74 z(ubz&O$co(l&#(uKNkzOekjAfPF-NUyY8qyFVD`re|T~3Rpq?#rffWjPM!RdW{7i2 zr9!esgqD2Mz@v~83idjwC}W9sd8%c}_f!-S&paz>8hr&W%?KDI9*s^mwO4~%!t#vQ zZAZ#3XbSbrU zai$sPBdLT?0vJ45msc5yO&4SDG9Q|7v1^%VmGGblSNmNaPid{67`ew$X=*2U?znSt zLOSy&arZ_^+#^pP_9TuWN~AQHQsacy9%*R2Zh z%W>0n{6R5!MYlEol;u^jGHpo>bg9iD^X_421_eLM?knz_=jJ5&jZm-H-Hp zEDg*v&i8N3L3b^(pU4w189^}(%*YS$rjw3rtG)E{JEz{6v0*>&O*PUZk6kBo#v>Sf zH5#C|-WCn8Y*$DH+RzVbqAMg(yhF}{@R6L}yso4ip%{zt=ZXdr) z=wj)$={c5n#%uA@cXpaVy#b=*MLDL8v0tjBNt!juvP|`?AkiFW)&$KLV~(Fj5r74> z8uYjbj#<{ocxkE}$8721u^d7i)_vMzB~}PQ5~Cm4jA(-HZK;esW|2;8e{cJqRRd!$ zdv6SLD#|WryGz=C^U2OT;QF<3O^R)Y^-B7W{J^+!b~Ufqj_zk_&e)YKt$g;u4=Nq) z+r-72bkSoHMz?FgR@e$i@oZ z4nht_y=)$5t&)*a%{;Y8t||$a-zY zulU}3&cnZ{1i$b})fIWy*u(v;<622|QutnILQ>IT;=0>yiXf}TK{y5N%b5jaIcj}k zGh@x~&0@kdyib&vM(K~n7m&dtl|h@=Y)2orl}Dm)q4uf;Ra`un&n8Lmn(e7kfDZ$V zGEI;jF(vTtVaa=1nrmIkiLfo%C*IIzjvZXnBr6cn^f)@SPjmBn-#M)mKz7`I`o2_| za(XJ4=BIc{#~agCagl@VmurY8LNrvgB#qavUIL*g$hNnrg!OlrrwC9~h1FyE zchN(#33I0GF^}A)3L7?38;7pTtjUR4TwKMF4{H0J~Njd)(f19Z(l5NBBm+ z+U!nOa)>(7I@u!(9?pqr1~!EX0o?5+Th_XLndV(DI+K784WAKMMW>01Fd((DhKcy$St0|kWF>Ry?x*OMTm zH#)7W5;?NPZLNjdZ*5a3uru;X3nVKbe0^xcN}yFPmKz-ubZ4 z{!%a}MA-(5+$qM`xk5Uy-_U8j-Vk2lB2>?L-7I;wPtckWY`C6am6x39Ll+>ZzdoTZ z8yr3UQNCV|&}^VM+@dZAbB9Ea>t#X-StiNg5Kt2$-)b@DJ`3Srree75u_j#QytmCy>p@I>oeo)8?D5Q-1cGeO zmztdt?oPjxIl8)UJyIFNvFzAa;dv9P2NGc{#)HR#s?} zd6wRDb3W_*eh^9+E=YNso@_GO{*z*`J7C;=UHZ|}$rEl2%%>}=iIpVv%nz_HaB8q8?{cU^y+;*)2})oEoWtzr!attb-%%+;2xIPIE9zIm*rr+3Fcm> zt7E1c%?>lnd%}Ot()3P@a*V;C&N$0K1pqxI9w1N@q$hQM!-pi7)AM^rwIqF-Txzb0GXM*4D> z5pDzXK4gqXfTT`fsX^TJRpE=7LBw4wlr&$G#!N-8YV9N(Jj&ixWRNijcn{jL2W5u& zPL~YG=9O1p5M%9gI;G0uE?YXg66c4yE7voqYKd;?E1K!x7kE}kB%Y3l@Ot;f9pLk= zZ7sbNg@iE+NuIJ+dgp~4H%lju{Nh4-%KO3Wp?Hu#dI~lQ!(Z=qhdgZu2^j@n%I_92&QIfW+&y14IU zH#ZvA)KxEk1?9zbs*0xgl#=Yg>ZRpe4&a&wvH@2K1}JHyr>nv}RB4B>MX>@6s6|lH z99%g~Y~sYVx0RVntZI>PH@N?`A%VH-n5`q5)>B+nx|qYG@x`hBMeE?cJU*?umK=nK zMK>TI-@WWr=9H3~>Rqby_#>gpHpkg>%5Ou_Ip?n=kL$ECPW3JZE7v#X!diu;tH}qbbm%8ua*vdO&(JYn%NT``8=za3+y``qinF>Boso~V|nn`;j&x4q_ zd6r2yP?*w|$u1;&bIfq_S|(`*B4xE-&_e^=RtQtMSn$H&V# z)D#cM3Uu8tFs78mAvSzTVZxX=4Rh)7`h~^&N_QzRnBTY7G88gCi0O1v;)A?aPH$gI z+VI<3Q(zx?7uUgDz^&EbqC9jtr#1C>{8iVTzQ;XanMiEND9$#Qp1tetC{Ag_fkNp{WXW#2uwVN^`IQ0sW^W!x_X{497M0CB>UG{>)!X10@MU| zsDvxYGqocgZmGHz-)};!V$B(xwa?60pSVGubAH?QHm_GFv>a#Hvb5>Za*wv_5|Ni+L&-b33ibnO&XD3wySE_B+w?*jUnYY;NEK{oWefg&g5| zA@7H4&RuX32wC&B7hWSLw!lTeWysy`!Ug+POJXJa2JPxRnE_;uMHv8GM$=@lgpnof zai}F8(4w?+2(ReY^4OvA>d+xq=S1>2G}n2V(axnXyIFRx6INQPJP);%a~>zr5~FR? 
zU1))O-bLk1xz{`)C`!L)>m<%K**CgUyJz#FgnwXjac+5a#fX)hv_`}~j3HN*xA(E` zLXj}`(mTq%!6%>OtuZ!)5X*@?NGS!A)6vcKJRYjvS)L9j?bfNj{Q~`#pbip3-}K|{ zw!(4`Cx`knj%E0S_ZSEoSZsonbslk6S=E5q+?7UQe`JKL&hRO)KLYu4T-t;<3BfsW zU((Z5?|F$mof<7t{4UEkee<@x9g|lOx^3%;k#(@E2^5>*_}tUbNvi?tGW9dU$ZXXv z5AvRAzYE9vmS+HeZ5gkLlPR_AM7)2{BT_ie(qRF<61Mz0tl>JB-BZ zGhFFqe0WHdQbT~%I>%K+ba!lrz}N~K`FgoW8yv#>YGP!k%FHn$%JFJiWDav|$#P*I zI3w(rRcJz<_a)3@1cgUe+G_y@fHv$6HI2*I6GtyKYNtG3Fe2nH>DLfrwa#$3b+8F@ zcr3z4pk$3dZzWg!=)Hj+CwBp(`+)x?`V1FU36hExHc8bBO<*-SM)L~XMz(4fXMt5l znCH&0ckztJ0v?+@#iokt`1RLeQA0AVwYz*gaw}KUg5X{c;s8+y3Fiss>V4a7dTdxj zti$Q5@7r15=(RH9t!5YAgIgBN*g~JlgBu7^T>8+!ChP*J6fj?eH_Lx-BiE3_3Fpdm8G#=2 zp*&s3wGjfM<0hwC56widQM4>+yDXEQbwM3wL2TKOPeITFNq#9_8f^ZscQk7{_4kPdpu|cY*fx@+b5|mg$`u^c(_9C>p z9yaQ{U3X|vh;lTiKpd9Bd~5)4HT3Yx+cs$H=-37|2MA_j0^e_vUrdR zC#Wph7_cIDZwEkx@<|Zx-pmMYg(GJyu1D?(fLmg0n6Cdsl)EGSKp#--(L;`-kKhaR zSOI7?y&XVGAjHD(pb2j3f9mYRvU|j58<*TD610_uM6*z(tRuX1n<)%$X%>xx4567& zvRsF1_i_FXQp6!r1Pfa|V7DatlYl-}Fs*Vm=miJJW5y#DJxi~eO_?$bPnt-_M$|3d zIxI`OxY-BaLbQopXHn49A8zrehP`^az1DiNI(QV~b)=CGj}9a?XnWWycT%17xkqvf z9xM~=qTun6*`QL=WS%^&ES0zDPTK0xtW$3CA#9Wle(igJuFEakx9=v|W)e{*3E$Md z5`*vnh?M`RrtyFp%s6(ppwqdU7()>Z?X!Tm2@^_|j>jV06Xk{(4{x|WgqK}){YuV0 zq1uNf_okDL*zcV>wPF(KRd*&(t!ua2`Zg2t21D1@k%SkrSQ+fed8iFSkhy0d`%c9H z^g}n@YK?WBwHPZDz762?dfdg~Si7zWN_ndQRDSUfFL-s>a? zQH%Umf#^YM_~)>WGwMcvI%GXXwE2?^)v$x0lEUT0k+LSGDAa;Pz)T`M8@2g_@R5Z$c5;Nm05OCB8Z*w+$aldAM|9&sk#RtCce#W9x*@lXcQNk;sF@wNa_kNm(NTtpW^O0`aMzbV^Ft!0f zSg=j=b!UB>6U-#flxjdcoGfQ6%8Q&U$EgJp&vle;Dm;ssk>YEN{DkX#Vf&p68u~`^ z!P}s6(1%lJ)XZs0JT^`q#>5;}8+elep5sxjR%G}3HP&DG^{il9X8X9tMwrOy#YC87<#7ioHKPEBeL^hcSfXm9(%L_@%4~~g zbLr7_LK_eMO~eLg+@SI@yrhm09-Ka!4H?2`VvDkss|gu{<&ik|+OJ$^ed(G!kZowQ zLu{+ZDgGe|PjD{%`gSVsd$9M_+6JPVJ=?MGu61_wG<(_r$GfX(3nj1?W-vMm78}?j27%})N-<2$=s)JP9EVSAo}qPjY4FD@zmxoF4_3=AhQL9?tTd0 z_2sPhz^5_TEomq3eu4mz^Bvd)y`tB z#z=%7i?%~5Y33n9TIjOEjuQobeW|0ffSawTG%T|7Ig#8ChP>-3CU^T2B#g}Q)Se(< z=~q*uD8BbWX@Z-7>Lv5=ZLzQg-W%uH2^<^pJ4V+25BBU|jpi~AyS@a^18;J6;u~M! zt=&Z}K^amgA0+7&Q+RL9FLpvUCOt_rpRkL@@cH07Aoq?6S1V>b00Hi z0b{3ntjIxf{?9RFk(=j4k{={a3*S+n0HmvQQ0Q`A;R$2%BBgHj@EOzaDP_Gq7Pc)7 zIgl~#T)mlKn0fJ~Rrm5e!e_Pc>04JAVJg@PUIkc>e<}6*|KwNT}i_4qCt~6lxTOn|!Ynbr?lpT(o zwSZ4=fTZ-L-oQ1F`^H9cSPKmStMgA@Z#717YTz!59_S9uEu>sqI<-r@nJ5UkY%R{y z>)EeiYJ}5(Rc=6)$g&k!d`A=2wKh?|g_U%kb%ko&di|FB9J*cRLah+OG6ME$8!w*3 zSdw7$&ekiU0Pe{quTQQ4ZZ|#QSPDGCJo^Rd2k>RtlU5ezaBNQe1&rzSC^ZsnUwOf? 
zpj;6Ojozlbx2V3qN)>4BRgl<=64fowj6KwJg@&vd} zavQHa%rVJqoWX=Cv5!-@6s^()P1To@RY$U_WJzd7Zs&O@9Tz;)$5bJlCsA$)*{8bh zXp(P&S4&Wh;F`bdg^)p!5Z1kp?zktU^HNu6XT~wDEELXQ-q_exPg95?5&!0xYx4)wrZoMCqmY-Vz-D?#=3kS-qke4N{$@f z%by=TFxx+%A@GjV@W1p}rj!}%w&&lK5G+{s|iCa|a+Kot8?B!S!akVSBQJTC#yW)eps zRrL~0``awl}n@fphxa z4y2AUKr}kxMYrG90ZS2-uNUBv7v{l<@lq~#>33dkwW~1|V)Z%n!D@yBxA{$b3=%Q$ z+(O<>d&pZ|=1q!#PVZQe8tP9LGp~xfSazL9akb2J=VL)LIh%-UuasYkj z(WjH+fbmJCnb7DvkP-ffO$jilaScKgLJ-l{)*ay3~-2`52m-yA~edcjU7NXjHKAWfdpBM&_Vvl@uZwo zlboIPO&?gzLpaK63UXUw#dh*X%=sd_b5L8fsDyXx>qba-T|C_aQ4lg7CyNlhluO}P0lcrFo1q%90yDez#hl|k4!#q)>3b(<&hyzw5*pan=Nmz zO$a>zUu;SFU;|E|8DX#w)HuL%aWwZ5 zaRQzzu!R9l&TzJzM+W_no9_2KGy@U2>l`E_C_XgLHirl@fXM+KACU4z$Q8)rgiH>o zY(nS(-q)M;zoZAp0qaEtPBAz{8SF=9gn?CL7tZVCO0jvZ4W_>4_#GI)y}3Nrlj#D5 z9I)1Zg#1I^e?-3r4f%%V9yCf05o(RWEymm^x&Hz?7O`W@i$lJE_n;^>b^ibs8`5Ba zcqAF+5WxmGnwW=t$=r-jZfFxSIiSrO>&XG;7fN%gj4?pHaH$_I$ALL-_?|dK&<7KV zJ7AApi2D%sz!K60N;zN)1C~BS>Yj93E}?#3Ld1s-O$#C26)runNDT_D+mhH^l}0A}`a9Hd(pNc<7ri%Q*_ z5`1FFfu%j&7GH~{rodVsqC?b^a2(AD<}qd9 z0N-iTaBvFHhf2#IG3Jrkhk^8Ph~zzhGlBukQ8F12lRVUtsUS- z{Zw7-${j&oOlj`M;)Bc{a0CN{gADa0lzLI6l215JB@Y-5Mg&njSn5Y+7q8qFhsc0G zBGndfUb)#O)bR^=zDO+xxNm4=4?q{N*`0&|^oHj+u(vOvy1v8~2EZj<+}%xRO}PUu zU-fK4Jcl?TVnk|Ppv@KN_olRM6Iz!8oJR&85oRmO?E~Oo^N3)=ftWj1+XEWEAftRz zg-xj95OG}~&7Qv`=>q%g0dYRL90NCL1`fx8nM+7HV?#O6-SK{8>jv5<3t$r>(YD<>&a%0?oYjy$T1pa?+ zM-P@O=5fOR4GbV(4E|7TWu@PUm@;N?CJzlJyPh~BBo}bzhzlZDbYfmEiUZ^#TaWC0 ziW4F}WO;}-AbdgC0Y;o+Q4iS1Bh%yzxnE!n1F|_8d`<|TZOd_>=s!VuW1GFG4Ev8* zdQq(#=uUlcTy6RY&!i=s{v zwtonb|1sOmc+gbsnD=PJUkm z+4%W{3(LSA2ZxmNN%L7|SgQFWD!)JxCw2q_l0AU)=NQ18UycL&;k?1r275pwHzYRH`MPzW3D?l=tTuSIUc<$yPE9+z3&YD zg_?z4pThrfro59V>%dzAPVj(T!T*itu}8cPukZPZ8}S*vD5vR1wUpp8Q*4?1!pkco z$=?HqoNNI#1}H{&afJbQKoP!g(^E6ZiG6oNApU)Rg3EYQPoMxW!DVz{7t0y=4Y6OwhKDejP%SD4=|i01Wr(`@G~ z!n9TaH1jcn&b6>O>zCyWZ}acY&yVW?oMU4==pN?}Jjb4go}hPj|*M zGPfvYh3viu{zN`Ao@H+5OVrgFW zP}23|8v9T_!dIBx-d~v836B6_Zs!1DPDg*bhyCdO&dO|p*eG{)zvJ_O4K4%Vi|DO^ zYovZ2Sc)m;L^fw$I^!bZ>~G2&8~KLzVBq%FQf3^8y{N47$dqzG*^i9N0rV%`FxZ3Y zJCuHxdOx`C!|J$6vcML1fbzyxdQmC+6l?nqa4sS4ePCc&tUuX;*AzjGUo5MySC6`2}h@01Vur`GQ;?fIno7 zL!`j~??XejfE)wpL6bbhlj6w6wgI=BP+Nk!0=s$Tqyy9(B3l?hFDl4@G>Q>!i4UUt z*OTRw_0AvP%a&ZgIpMKsL@2|7H6EFH&bP!XKTH1MxuyApi_7y<9xzh4Lbl)q`ghvQ zaPm8|(KYiXF05^C2EOCK>RwFuBamZ@bUd4BV z-FPDz{(v2Fz-$j%>;X9jFmIE~1F`O0%Oy0F1G+tG#Xdx9`g<}Ck&%qUwH#8@jcUuX zTET#*3ygbF+3rDuyy32K!4#wNVAoasefasK#$+PJk++QwA%0L!iVGp0=x+84ISz0i zPHB;qVm;uGS^AS2*A^IHz|0}i@<(na7Z$dU&1Dq_4M;D{=@2B`*;?GTF0h6H%vUB1 z$Z=rAAu{)%v7;XudQ+k&)zV&`RyjwwA92sE?2DQ+y?*8P^T>?)62cAuj}ROpU_h;7 z`Tak!sSojwvI)oMB0rpy)t4M2EFf0Apj&`~lZ5w7jq#7Qav?2gnwH9=O9XKy!C7 zf0oMwh6BYME2=k@_bJxQvs3n@`a$%IfC!pzlw*LBhb{G zTj7=K@JFn1im$CJOuV%G%m#o@0e`U(J@B(@1vg>;)CltTNEXQBL)RD#iF#1QVePJfb3*f7Rq!CU=Fth2NWk{y{h?aTPrgSt;{wB zr+7+p%e7O>0r~H^zCAy2A>ClH2Mzm`+u#s^I{-Pbu)8mlMRHpJzeA??|I&l%RK(md z&Y$<7QS-=n?il{S_$EFIy#j0{56i}KMel<{B(E*FN^;@WrlNH2#e~~H($3=t!Q47G*<{(pEKdeNcxgpyAx=M>xKkH8*S-p7x{SkxH6 z8sn(O6r3{X05gBUj@p8yy@M3K17Zg=+xoHifTO*rG`hecE}<2_fawA;Ho)V=JWi;u z3&UrTj!7 zt1Xx70(>@P&h*wSh$498;)(LFAsnL zeigo0GB5y4B3uHE-3Ip&7 z?(N_*K*9i>KQQpao^ocrpqypq00sstXPa)NoYA;O`Z&P7+9%3YnA6@%W5YpzPiW%F zdXyT?voq(67>`Vh6G9)1uCJ;;lI($Lt$+cs?}(!Rh^8;0F{c=@;so*)VYhP`hVQy8 zHdwf{BEPvGnf*OzbTD8Khsewyk#h(1F`$+MwlMHeh6DY6GiYMvct93yu)G1@~{ zFW~-+tKur`o#0P=RMH(Xzn611q!=4uxd8G9`60XEDywf<+f%Vd&%Y)>GR6BQH(I5g@qUkBGa2ne1pg3qMqNH~HGfyw z9k4|X&&tF2Ly9^>-k0z{8Uvy(;28K(h6Bv8a&*=?=Mu`f1Hi%Tc3Qd5{C7H|(e*uu z6F|M`s1Mcru0HJA%I{^o2HY^j3DL87j{%1W{Go&x3#S;LkMlvCa%+4r$pY{J)xD^w zN39imz??Hu^(O_75V>Rc3JTrOCxo~{k-9uGd5w{6E}`Q*GVT|EJAj8p7l?A;t_^!Y 
zj)5QdS>%DE7*NhOweZM*gN5C)Xkz&c2P99&H1r^OKz;%Gl}`*LTf$Ql_l4X=&tlXb z!wyi-w?+OKoMPCVavu@zo88`*V#F#8*kKbIVF11%a>l4Z!az|6$xEUeZLd1F538eGD|v$9dl zfn;9PC0x;OMJ*c4(RqLmEwf5xfQShPR@^@zTOtyfW0ABkuoJW%OP80 z0|tCjsoq%0BePXopyiQ$zr_K2Ko0|ZMSB1;01se5j)OJkS!N!xS!Rx50Ck32X^zRp zp;;;4sO?9K`*keI1MU;RFW5^m0X<02>r{TfaldaS^`(S82p$<`8bTKA9v{N`63T50 zymxt@KvrjBS6@PF7*O;d5qnWxTA82XNB#=G0WtsSt^HZfSnC_=^&hcbU&8pLdOR{+ z{)ko=Fq_aEACk)frA?^Dz|Z?XG|L0&`mY=Z_#?gkGhwm9@3a(EX$8 zS#SWK0FV7sf{4TIC$u8nE^~UtrJdqNuus^y0E)H%N-l#3cj&1+m!bq;;~{)Z%WD;LM9BW zW2t+Du0vd5X>17Pg$gP9lPWQw=HSL2(|$3eYFn(EZ~9I;7saGs=g}V7fPqH`J~HbCyJnl(fdTMI(YJiffUJzuE4LLzTh5hy zMv+sl=8?fSKztCrsv$e%@8$M_`g!iluNxf9xJ2o~fR$Nh@;KoI)|)iH7-y#MC-)&& z4+taOPIU#c-jqhQ1!`WoMi*G)5E<$MF;}3#fR0V5hk?fjWjK)LoFiwb>lfJ5n^NjY z$8$x7rZJbGs0r@>SsD>xQfjF-~{F#{;fNuy+>DeWbGS5Es_;s>ZB)Y2H2hkijmT*3w5~XXs%;^yf{alfqbk(sa$UGvE~C{N>z% z>l+Hu9zqp9By?Uk!p6lF1z8_DCI{@PEzn@VJ|3AlZ!F6hYUO|m1Cl);mjh~fV9^D( zFd+3IvNGoyd1F539J)as7l3@2(Z-v_hZJ+JWfcME`)t3|89sU%qV4s6UeMZOTR0=aInh69|*+Cd@u^(C2v0p5oQ{PMMfvMXc)%Wn{` z^;l9Q@q@$~6SZGp_6;4D0~x>o^9hvpfLbPS`+)TwaB$weVQqmFAA&82SYaDk9so8S zxa7zc%4eDB%{azPqczm0UT$j|;(%-q)CsPR3nm#ZatAZUE2n;Rsy;-#2MOc>Y(dC= zaEcKp<=+eO1k|D2+AQ)2b-9F^8Y8VPP;!VQ45;J8lsmR52kgLrTpn2MK_lap8_EHL zK17&dGrGQIt_k!w9_t2C4L5oeOZI@dH>Ip69pb}?3-tJydaB6mO(QJeb6b^gX8b|) zCk2PZ5+Bmxl^gaa6?+mYbB1#Mh=_xm8;ej5ERWDkoiim|Y#Nb^H~=Ng{m7Jk2khgI z@SGv{3mA{g5qrQ+3}|p5`G$Os5$X!$wFQo10Dd86mn07jU^Qbr9)SA?ej$7UTQd!h z5vL3~A2Punxd7RWd$Jel?hQk-iM6N1AJg^FZzBo{2fh zj%V}jZO^rIgg-+5A!^UhE-grL5oI4JU~x=CU#dMT!D@@dhXJS98kbPPA=2_kQQWMp558Ss3p+y zPEc<+yS>O)m+NU@1lYiHEh#rNv8gZByCO%NpXH3~;}F@$CA_`081WNwYIqdD!m7CJ z!~rX%;g#FPANe1`fE))3U0_dNLTkOMbvQ)G9V0Hp&k8@sH1rZ@8X93h#vegHx=GEn zH6`0dg;9^GyuuEO8^l!=WsX{Zz<$J;K%Srv(f$OHGw?0t2oWa+m*fi78Y6#i^?< zLF`d1#fPl+DYm5xEOJ0T=NfhZY{Fijiu`h(7uY!#u|wh!^4TQ@ymH`RdTVXI8GY$c ze+)h;a)e8I1u@->b7!)LE6voc3*CWg)77% z=5=O@p7kmWu-=sPxJ5Om7grRfnLbKR4fIpDO78SoMX6?>w3$TU>BfXN>4rj58R&QG6T zDf9zA8@+5MHxD3Np^Wfoe_6`;!qzX#WQ2ZwiHbwSb4D5T3)K36|IYOS&l%a@he(S7 z-I<0A1D3g#um#ho&JZ)}aBt&*8K+{-*m%bANsE| zxP!0HVv z<%L;4x;xuS&>5DbM=A5;;U38TlGKZdcocN@a-0DR=x>3Ch+|*ER7sNg<)j#qZ7w0_ zk2reP)?UwX;0OlP{1G*;ToWTii~v4?%r8KEX!D2=7LUh03NG-W>Ee7-%sDsDI+t?? 
zRJDe{1Ns@H&CE@HpzR(sybsaoMfoW=5GHh%)uyOrg8X;b5janLPi~*U7VYtEi z6ld8f=UQ6tLo}wLk6A|J4DlVno8~>&@jK25=dSiUjLw(aF!sJ4SX@l@!Y^qb8ekyv z?Ey7zI1V&@2QnK2=Kdpc-%y-ef!M&xe!^*Zm?BMg}cMBsNe5B7Kv%()ypJ#f1H{$@k2Y8HdtgZeds5g^rLLL5yd8Q$= z2lRVSa6aib(p<~aBm*$V0Qo{(-*vWa|2p7TG%Iqc39f5^k?k1aC| zl^h~PotbQ|C9nXlz>N)JUsCu2`28mQ1BwwMCU9$WaTX&=otck%8|NfjGnirwS*d0M z{Q+H=>}PrcHUw}&_czX#KVVl_!wKZUw$Ty7v%?>0xrF}%7yx!S4vgknT8$565Ccj+ zX>QU7;S@^<6YY7rrr?yIH!YiEBKd{L7fo)i>PJA=ei_xAqCct4`L$yvh!6d^ zw+!9SVvGoJqDOcf_<-bnd12!S;wg{~gnJ)y1Nsl~st)DcgyZuIGo64sLtA`OqjSvY z^Bf13`Xa;$wY{p9Heu#qK&&ycV1U~L=vjM)Y=I=|Sq_^4_dj|QBX5kKCrCGRt*ERy zg={`ND?+L((#|;6%@t7I*qB$YjS(q(Q(8Dgccl6fT@H~HA9_sp`tf;%ssCLt^~m%l zn0)ko!CoBGFsrbS>W}&S67wu8l`e3YSFYiaX?f-9UR3ISWQZF= z?!!0yDdhu-E0<^Xz~h4-D&@fckUgN5A6y=Ya|_ZT2dvCC)y_1uf&oo`QZY`9m@#|+ z%q8hfJuMgY3}*5lv0{0B8}}WcgOM9Rtmq16*hm<#HRoExCrw8$Dm9M`eF?QZGG14L znsc%4JR|%O^?9cEcMA?w7+}4qlzj)}aY9=hA}t2eKid@j2f!aWGAEqn5hp}vr9K!r zD4%Q54O}k3E{EP7QVaHig+pew?|^!am7*t|bql*n`G8pHz$wfHyR*QC5Y0ppMVLXHSUn zXL)18*`{hapfk@--JevA0Yx7oLmrttRxJ63>Ex4U*14-chvUGKFEG{xTn;c1@^ z93n-&NNp3UdQsV~F_LB+Gnr73`mQPa5NL2f@&G(B#DuOGaER11j@2^_)fl*~;*&bs zi%P%NF!OT->NB?IcQX!b$N^=H&h%Vt-0|`javq*?*`_sKx+( z=Qtq#$UAUIm`|YM5ZSf~MHyhL50M%JhCOJk=8eUEWY^XeVzaH96(Y_V*#bCwJP^}w zPWae3JO|^IS;v5ErlD>hq73w+(qKT@e?;#r{x_fFI5?OP`UE}c-Zutp_90SZfb}5Z zb!N(5RM%;)>7I#sh}Y)JYL9z<>Or#2tJ-~a?Y5EX3 zA_sK)mD}QxWge&aJ<5w`90z0C`OiB{b8eYWU=$y)GPgn#D>kqP6n=pYmr!rkIeHBk z<&6<1MD7@Q0?ajn4B&Er#Rz2>FziRB=uK(Rlgv3`jhJAjnmPGi1(tBW|g+588xPX>NmYEpP?@4FjkI3x-RgYSO ze32L*kmP`3uI2m30Ix4m_o8|qoZ`$lclBqyI$25R5uKw2$AOcR$uzS^;FjamgTxkx zDAW3pX>3CKI7IS(WC{$BPSE6wtjw>`%rHu4PdYUQ%zV=SP+h>W@JlN3;8H$(9+}%S zjN`yWf1*G4ZhCgU{d24`r5BZz*``K$V-_do+yQ&|BUW++>iM>qU!m+%>{wsI|0@^( z7G55!BaRQ1_;^*aQE2JTkR>PtWgvpQgq86d|2e(zG(x8J#ixr}QK zGl~I?O=#SQNZX^dtMx^D4bbc`mmRWGsJ-g z2f=K(qj`ksP&*l(0iUGt9NgupwTs63&90Nx3 zfWM|xq(^w_%v`t6QV$A;derJ*z&zX35e#T}_j5Y|Eu1NPv6VWEIW#O^7%PJFw9>J6>g z1B3zl=i6GDU6MgLpzTTb|6TrwE!_bB@V9R&{@S5_SUn>Q*oy){G7W)I9CAMEeuX$u36;(&i%wm{dcu}uQw4kw1+zPXNi z4^fQ?t39+QSHS1lWe@|m9N~}X_9e81f&UZy5oQx0F7%xILBxqk4!qbkyV(3sO8XYJ z=luC=)Ny4;-U~dmspa2n@!%Yrj?d5c=nB=FeM^sv#3dDNLW7xxx)?Cv6q|K{Wwxn9 z7_h+~dEb2r_3qisc#fYv>3(40_w#GZ9GTufU;omDydxYr!_2^ee7$0MJ@T}z?cZ#6 z-|R_giFr?7URmHV`2_FGIYZ4{OYn#&Um(q|QE&-0ymFoSwvNoZSMbXJ@AJr>9;)!- z{+{yR?wVC>{ls1oy{cCVC>7}Inx5k~zlY0%|Na!rxt9O`UJE&tDhp}!tMFF-oQL?_ zZ}R7y#OLs0%OCN%c*S!g@e)Dtlqnp~y;DBthvyW_noR7uzm>1^z;lYPVluhtJm-Yx z?kV4^_nZl@!$bZ@@3|;?LA>g9|9#G6`tLRP?=`?}@n0PL_Zs{c2cm-b@4fIpcQ2%g zm)!g}uD$v>{I2)hJ@dQ7f8aH$=a|i)XK$$N5@v(w*(!QAjmq|6_K?a(s?45?r@t(J zL|f{f;yE4L%td@ov9O=`oZenM@I;npHk;G-%N;2M6ygaAI)ytK=s?-udTM$5(FUT+SZJ{!5t8t8q_~k~Tbf^5oZ{&vW{Wi>@&5dfY!xe|;YP=kU}G zZ#JYXxR;~W(e;OKZ~ZgnyWKel?$5b7b>Hru7kYK?(!cufZ>Pm2j7xS(aE>rVc?Ohx zDEJo$5BKieq1o%vHJY3+boodLlk@Mtw_CGd)SRCt zcC5Rr)S}O>?~8qM!qu<=3Dw@Xid z>a=NN|M)5?-8=6)KE&B=Z<8U;XJ&Z1Z7W{ygxk7tTfc4IZSf~>e>$7q* z*!O;|Dt$NoM9gg0sBT$(?!5YKZT0Yfo|L*a;QLpDn};uIGs+ew&tUMBkU?gqtIu_qy}izp3e3*UpO$rMCR#;nG~4Pt7b}amvfJ**foA z5E)y(&!$yN0`o50RcT_!mBZg%c~`scyWdkQ3RCY2E6*P*w`s$kl!Bh|H+;v3zgb_X zTIE0brT)}v&mT@d4NN=WTeez(4%ZS$DNxq zxN1zX^?z0A;nAeMY4McKevAJ(p89X=3-5jxem~JRxml&yzqbWN{jlbr0aHi54u5;* z&DvM*K6tuw@JFqOG!I#MIp?ZYyQ4cl?zpIBNOVBk;lm>9-3V%$cU1DMiO=G%<>-=U z=wGkD72bU`^4W`Wtw-)U8yB9_>EfuiW#9cMya}KB$*Xcg@Rb}LpRGQW{od95PM_aO zZ5tW!L($7|<6JiTp$Zuc$ znfk_~?zB&S_f9=GtoNK<3*Wj<5lR>FD}BG|`EsS>%hYUgy4Lo4+aD}G`bq8Z({qLm zS-7!R$BxPOFF!3hrQFIMU1Mr}Hl^6ruNr?e^UA%#Ej)^?A6cZw9{-)8&#o>NUJLhs zx;ymMkByhUZt>U4r+0?ujlW+#_08<3BYym=)qoFs3O)8#@D3fndhxMGH)b7cTrTR= zwjl%NK8dV1ZtIM^PUT8AE9sH#^u1bfXO8Xq!ufi&c0PxX6#u^Q)84mUmwNZS^Ok@9 
z5q?R{yZhTedJfHJYWH!|KWhsQq6-PHD!-n-^_N{6cgFSEdpHw^|N8%FKdDl#T%)^g8yA!bCiHGxhSkIy*xCiUOwk&k*D zxIby@E>qOc#p`r+%Nl7qJm_i@&%U39l$jazX2!4+?J91s)bguo1-h)NP$hbA*@Npg zL@s->?D4M1>5mI}t=qfG`_DhO&U!PiUEqy_1OIwH?1wFn9;SHRDVUP%wTo^03o5`F59L$?+^FMhMhx-4Fgrk1Z8-S6pwJdbh}8(3jkpN&uU zc4+x$Ij(6xUe{^91rvhJZ{lYoA4DDpFPmfzWo{y*dq;l@>Q+70YcY0RW2dRJm z**f=-M`uz4KK%Kz_m7PW)W48_f78|VuI;Sn)^7Uv!h51all>cvSe_*y^mosn<5D_x z{39;0L%%*%Q{%=x?e=s-vzQK-8@DVSS+ZZNX2o}&os>7$V{4yZ&xXH!cK)B#u)jyN zou9N}=JE-tFOE%W{db|eJ}suWCY*BjES$LU`)h6DQ<545KJK;H?evDv$87#&;mYJ0 zb;Da6+m>>DSLc|5Eq9Ty92(g!$ZK87=2eFZRjM%hn^a+;@b`=7!XI~w%-S{C_4^Y0 z_k8vCa_ZY+WA8_N@cglGuF$Af&986R`|#(MQz9>h$4uT_Z2gu=uXAs_^Vi9hwF`YV zZ1dn>ZqF{*ti4bC6^~87r~Ywa#Pq#W8yEjA(XH^2L5)tXozU2&%gx`*&1u{I@yQQf z{%|QWxJkRX#brl_ym8L4S)FH z^0nw8tvZJn`*hTgUu`@%_T0$7qDP*X-*n~h&QZ5q!VWz5UYdQ#FWm+27R}e)NEvuD z$p6LfeR|c68j-f$ZS|1b?KX}i>~HDoSAE2QzPr{JTK1s%W3OQwC%HL&Uq0t`_uAgs zetMPq_nIM}ed`j@YpU;OZ^{XacCFlWa&q&(>ePuEyu50cED48qkDgm?R+HvJqc7bX z7Hd?bV7+~XBKFK1ytnVr3j65OmrwUv7dK-}GA&Y}M(nBA8{%d>_NsAg_vN6b(}Ihi zZ+|;UxFcM-ap2QGF4r4x$}#ZM-v@X8;nmP1gL0Nyme1?Ak&VJjmO3-5dZRv}kJmJ= zk=7>u>f+U5(TxKylKv@QVA8vLr=Jv^+|jG{;g0!6IYUJrbq@daVan8;@BSG1^30Qp zlT0;w#`}EwVwW)Ub?%21Yu9?$xW=-gZ5s^?T~lUzm1|X;6LS?CzV1%5usI8hoWEQ* z#w}&To>Boxr%ohZ4Jkb{-u-$Ox*|oIHc$I|c;#1qi%!LT^Wb@))0F{t|BOHQ`}lvF z73%fx(1CL|jJ=e+;GOU^>2ya|C(d+JeKxl>(?Da-7kflT77eJ(DYiByaSsxDSXS@ zD{SiN$XnOvMK!s8dhefS-63t)_O($X8{TMjWW$Nn7iX@e z)eG$Tm1kP3)VRe%|2>%CZC{LJYEu{Fv1 z?6$b-spz7r#p@{KcU=sV@fYT=(EbLeZItVb!*;-n__d$jZ}CPIu^BB)0gSV{_wk#I8@- z-l#?1QbFTuHO;cQSIww4#~L-+?6M|i;nk4Io45CBPTynF#COciP^`xlVvjPdvhClLtb#a|{71uUfZl#>R zv-6KT7yrn&Z9`;4SmVkyY}#?MLQ1|-mq~obb%@T9 zw616C8d1ATta;G9W@6AEpI*7vdtckXul>`@C7*Nbp1T7|t@rcIfAd24$yxC+_2;KF z^$qktpHx2WLB%Ju#D+^bz2>wGEODx2&XTh?cO38XI_0Mar!SSL?YyRHi()<(yXU)- zZ{?oKe^>AiO543K{EM(<3+k`mUiu^nqgp2y_>}Hhr|jOce_RP^=X|yK^%o?wiliMo zyEo-pKK9^>VwGd^j%wB5M%VTar+HNVYMooHTuDuz?!N8T`qy`z z6N;`6&+7FstWC$j8AU4|oYI8w?%uw{^^kFww!MsObFuNH?MZ!$7rdQzuj!@UgZK9T z#;HK5$IZuWUG>kLLZe>9EjvP=Jt8vn?{%AQ_TN!%U$2Db$shJwwK944j)>A9*M3=@ zfa22OLCdotg-$sii>&uSt{I;+E>m>z<|EIhZkjQA%Bj77KhD*%psq|>g^%bW>XGAS25%NQW;vL7i7A@%hMTrJ?_dYAwZOg48 z-pl7EM)!$7zHQl-y|t<}zLKSJKBqC8n+IMz*{Xc!`W?KY3cD>2e!Kd{h)T7cHs?C= zB;2dYg<^9yO|E^d=Q!7t8pV&M?Y=#9-H!X?Z=dS4wEnS>N^|S&Ihg%w-)fh}1=UG8 zn&4gLz>B?Q-J32ew%m2+Poq;x`j7psMQzWIbNko&{C19s?f;tZTBT)@U(%|byM5n$ z>0GHr^SqXS5AI(vHz48F`*HYHka|55u6l1 zW|Y_V*Lz(XH)ytLvuXWR|Ki2l+<)fMv_fCk#b=`T{t=XA=pfIj=N6Xf`$Mk}HsA6I z9UJBU@P{YR;is=U%_@6x{g{`Je|l3i&8=F40CF>nd{id%NH?FJk;x)U{QAYdl({pm-ixT!YI^qq>nAl0 zt(0^jJb&`Yr|Y}MCpfvC&-bw7m#+f~jf#o6G-<>PQ-Ni3r%tK+>D0$bt(=Nyx$$A& zp!QM8SqJ{xyvLh;OFtj}@0;$kU6*ZfdA&7dbIaLjr$gIc4P2d6IKKX~K~3M}|Jl1^ z?P+Bab5^~%s7$AhH(wTcUB)RSX+^SWRnoHHxR{tBuPW9_{VnibrA|M4?7jctlSeTJ z#x@-JGW_1Tv(CE{53dgI9DU4fbxgAXm1h5Vs#bxLNrU}Ta&&B*>p|UOWkz)?;`21v zr%GJtt_OdIpUTppS%;3x*RM<1@vu>@3$K>CX31JQI{I;)j)BWIPdWBY_0oSnI1_cS z$)?7ATMqiU)V(9EE4%rB_Uoo!nuhdGT-7g2-u!1?)Qp(@(W9cRujTg&^Xy)r=BeeC zI=7v8=3IwHuOrizZ7JNc*qj$%&1^rgc3899r^6CI`Rjaev$^?;Jw5K}^vCd+!H*LQ z&%0hVX=kew-yi<6OCG`VgC`Xm<)h13xPR#5#ipRP4`)O*-*YUb;K0H)owK)?wQTc@ zs8dI~Y~GO;<=ppNsGrBtT62e0Z&-gt?)5F}uX8`ux5Mk#t!k$AXyksgMz(e>npPU> zTf0zQ=V@a$2j-3cx$Up_^8QxpfaiD7hgZiH&t39ZvmueAil%g`nO3~T>O)QD`aTUR zU&-4iGU8OxKh8I}-@RzFhJDsgjH$mm_s266H?1ueSop`{b@CRU+b8sk!gIe~>RxH_ z9p?tSr~Y*6ThCiPfBS82)Ofn_UoWW{)qKnH8S!fl^^LoFqe+t{SIT#9aV+PQlNFlG zS~T{n&&jdbRH0_doc7U?PCpC^p7eRAgrfTz69;;L@+n^sGE%^R;73WSZgC6(lGyF)yS|<0-m$J`~JQx!h z(PaB2)9RevM-OrjEIcXs6EDvb33+?`b>q#6Jz1Q`j)-_WBhSf>X>TSZy9U&rKDSt% znN>IUC=hrtamU0>-Z4ubR;gWg_3{Jt|E%Uby8imam`Sdu@7L=6ZR@}`2~K0qg8#39eO6tkDhNE1s 
z-1@7Q^O195OJgse?i9Z9+?C=_%(|29UPhk;39Yb!PzUOSJ^?CP_qsLW^8S$iR%-?5v zAI={Be2M9Ze}^`v{Cn>B{N$0vclFu)w>!oZ&z5xP(95{A9g_=%%!-;;!s|`P{#Wmh zF1pZjetYMAd*c&skNjd%{9mS3M@*A*e_TDZVl}tbD`x#u%i~GzqZ5j|czIs9d^*d8 zDW@aaH(F7=z_MOhKTn=|=E7&wgHrz)+3xq>>Ti7g_UEnlySE-!zI6QG3%wH??4SDk z{=b(0Kbo$?pRG6CClOmKVyh8KDWOJd7NOK^(Nde(T6=F|7O}V1R<-x2+FR{el-g=5 zYVUh~_kQkwkaN!aJkR&}jyEm74AP4uo18zRsghr621A{#5LNz@X_uUI_bdK7bH9R! z)WhFD#T)Fv}Xz41&{dj_A-U}c3 z4bRRus{Bv+2egtB&F|_5#S%C6|M+9>C#y^9=s7YRP+P9?Cj(2Pr9u4EH1&XAo-WUz zVuSe}&sdF0z3F^CXVKx^&}%`EjVxc!nox5ME9gs9x=G9<_>X6td2!JB-<I-i%0xNH4X*@WnwIxXH*NvH64Jo!?$D&G(B(m|y~3 z?pyE0-s@~8i25bWNidjs^bmyY&~K(QIr|-1+5GM3ZAS7`AXsysHLJhp1Q7>{+sOT5 zLYOeySSBr0$;66*Ub4qSbG|8s%Y#s1;YHh8a6*)!!=qgIj<|%yZ++mw)p^^5RT!wV z_f0PUX!0IyTcHK2Eq{BHzmq(qUD*C67Wyh>@;(~ICH%Od6B39Q{%4Zet zX`LRv7aEkk!OZX$+fXIXJ>IGxuNaTgDGPJ{HT5h%_Qy|3>GZ^Lje;k1T&SGIlLp^$ zH)_Swae0VZd;ChcA{VWEI{S`1EgZ)AU2!0%2O6QTmR^n8{A20RoqsOjTRM^N@TYMk z+viwV)rs%+`-t)Nf)`lPy_l&F4uL6Fo-bTo>x15&Egtc@PD_+{9+L%?3U;)~joKc` zC68$omfs3fh3|kI_Dfj^6Ie^sUukG|NpLT-P~o~TfP%_DnKCb0jJbk%gEH``LbzBF zthzQ*P6{AJBU2I>5h1H&f1+LhH125ruK)p+*)wkQmx6!)tcg)MSR+hRJT5dAwx1x| zbe@H2WO_jY#oT~{BDJE?P>DlE+&&Jcil$$l^Zk<-D$Eu<8`W8JwUD zLsprUbtc}Xu=%f1#hPI2zsy98=0dPVh9x-Sgr@&Q8eY=9mq$p^*Pen(e=ZFTnb_XV zrAT^1_Xad!c;Et?+!jyD5VyZwn~|9T0#Cd8G`5z#-zc;VSGYZ4jk!%xf`Fj8{AW>-F z#fHz(JQs{~jjnABnILTIusRwes%LIdI71E#)hg9D*~u8IMLbfCgYtVwX&V1RB&@6u zKUw}gF_PnS3!$bm=5dk=#f~&%E<^5V-I-*zZ(r?$i6Iq;guY#JdB_+Ngm1_>gV!z} z$3)TzW-b>hvI>H#<3u!E6Ek^Z3iqD}qFt##y!NY+S|_V2Z67Ad>U6dB_`a(JtS@(> zmT>AGU#jrdSO}qhzE*Z)jp?I4fjZJFVT-fwA2)1!nq1sdq|`UkMf*pu`kh;(c>a0T z=6YVHBvR;d`#kEd@s60dH_B*dx`oJiOa&Nnv4#Jl%t*2bJyJQL1Q3Fr6ikq3!(XbXQ2H27zTr) zjbb*xI#JC~=~xW6HS*gq-d%z5>TLbRpGH(n#M)~(l>F-iYfN)idBRn_EXHff3b>g2 zsACB7Voi?jXiZQ{fx#q3`GKQH=Opql#O{F!)j7cp2uUE!RDYi;im~_jYyr7(?Z5)1 zp=tZGVc2zWGHo&xc*bm5JLoS(UdD82`obKv=JKmX+@B+Ju{&h!M?Er&hbL}5-Q@*- z_C^e$A}uGU5(FbmpH)jIs#&(w|E(o*IJFwH1IhR}hX1$s`~@lE*e8aU^GRSeOmXb_ zE!mP6#%%NUZ9seaT)UE#^Vbh}T47KSEbsog*%vxOR9Rq91p%r6*H|vtc=&Ytb*noa4G29U zgJ@9=OPnx7EV>exeA0a`b|91htx@v#sMEuyCd$gfIUIKjI^sL_*$HK)$ssz$*)H1@ zJa<38kqr)mZSNgC-0(X9FZ}t)n7oDzLRhiq!`ss~YvT<3;#VVCK7WX2lX-HT%!tyrXj_Wa zstMOUylYK%2c_*0M}BDpj~8AyD>~BGjAV5F_S!dbTr`003G!D~sDKD(s)i_IKruWu z--}p5%Pl|U5o6*inxILSvFiigvg?z0^q$~iW9vlw51%vRUH_{*ToGCDQ^m(kh3}nt z40$$huL|sD3A`)`vY(V30f^1Yk=))&0Rz*D!h!WC(^kXmtyKpg#2uC|r5}N%IncTY zGM=fwSx#njdjrX%qHc^z8^gIyTl}Kc)dZi1Ci@7N)jSUGss?hB1%gSOm>7H3TAG9F zj)fATyCf*1!MA564yr87usbAFqSnwu@O{U?kuz5i0Te#{iTfq)T)eRQvzL?oU{~c}YJ0nbG0uZTX ziDC4zcq3nh2YIv$3}SqkB&4S!y>X3;Oc%$l7fx8Fi+>!Rq;NZRzX>qY*L`iPn2_Ug z2=6$*!e*l2p_l66)g0WGQw3dB+ZoW9t?+d*L1q21s!lL2wS)d(s}pEK#V>h4hide1 z)?uN3iu>9GT59%HDcr#>UDSrhr3Zz#U10MYv3tb6O~+lNK!BB{mr|t7qIXCh`0Z$= z_1u$!I@1%c^ld&S#=(M3ul&Df=X%l{F*aw{`Kgyl7NAHDbL0P8dz37Oa|E8@*34Z? 
zO|E2$IHmplCVnSO4u*{2U{^)iJPr8?5nD!5s}CiK6h7fS6;WEY2gygMN)CrCl{olw z?UTw#T4~;U6r4#w$O_6^Id6{72vk)&Y^{0M+2J3U;(M7|fc*f@z;|2qRQ5itMh6iX zu2VWljcRNezeQMn8ey+v#eD z-=&ho{gJI~-X7kg%g-ry2fQg8%z9c&>`3gLU+K49Gjbf{G?*F6Ozvs$ArNA@%bR3Y zHUHwd#~yn>4ZK$=jqM_ga3d#4Ryo4|v4{4%rWCPad#6AA7riCOw=#nb=6`>{AH$}* zJEX=g9%sfg(RFl@t6-+Ayz{gi$-n*%d3+XWF2E5iDM0M!I`#aPR2tyFKrVWiLl4WZ z?Wx_}3H%oDGUXt>m^@&6C5>LX7P<={by4mlm&Sa;qJ91Suvza`@8(XUtu-}2cfe|Y z7E2*ph`361zYl7Ey`PYiH2B==3CoeQ}`TH6+ojWl{M> zJAR7C`mf20T)y)elV||Pg@22RGXcn+k+;O@*yOexO@AkHHlWN9AfpZ+tUdF$!6SK4 z5W=$7ez`CoEXf-mpMuo6raN-{T3@wVh?;+jei8;JF%h_^`+qNh-C{*a%cNP3BY;Zy^VlFCHq*JdxPuj&`Ba@OsJ10I8}$#|7RDv6A@ zr_bwk1x?m68soQG#^1h=D}xRmjeU1kaHrOf4*;DOX;gfa%b^Tj;VxcUAzJ(Ehk9JF ziV0>;dY#cVxMOE9KczSA_!gj4EKBi<$1vi;1Xh-?;ZfLbwp0caJx0T>t!Uclryi^q zuhvE@5-;E#aqr#DUEt~Xn+pTw3|n8!Nu~$M=>aU@-Sze}bP-O*1TySp-TiYTip++P zkFT~i%*zSkAbx)-DXJf~x2iqbp*EeFhHaCElEL;4e34oondO^94pk&cjJ5mfbmw<@ zWH@9;KeT^NT?ThIQH0pDv!nDiQ|_jh6f+Ua0Ut0({x%L34qOKYDE1Zd`apb;Q5dr> z$L@}8(2Mo#!62X=7%{P0%qL=cDFS$cSOmYdfO zmDxP(F6CA-m=(FmHsoV*uep4UvRte#BUrl$=^@U#;*OsMNBYA&v$+A2;fnp;7hON=O&{slU8`VX&iG(rk9l?I)P;;@?=NVu#xI-k%oQGOZ3kNSy4FB zxzFy5e(YiRczfcKi&hfCa^4bQRnx8~%1v(Dq_V~~4eS>2pOcc4E=2FNoK7_1_e3&y zu*OjNgk@mX#XqgRp5`E&d4$=j`jL{T@V$dK4zp0{WXJ;d#LN4L6NUs=A_pbzAhW&E z0~4A+=Q}bdnfE02Bg5z8A)w6LU&v#i0#ma!olt+IZZOrhf5e{q-6D20Cl%`O*gmX` z!-W9@s~HAH@GTSiw)u=~c*u#t^ZnzWYgc169JmbhD4P?`Ng>;|wy&=WN)7Sdyh+|Q zgW-F=g8?u!GYv6WjV^6(QXgeNwTu-ar|D0F@Y$Lh@6EQ=pEOAV&j#^#s>W~_F?v~*=ssgK+r-qLFQWAwyFP!UKA}u;IK6I7m%&!9|_vH za0Jlc0`bSZHyD1=(%@J>$UU1MXuQoxE-rRR<6a{qxK5=#TDXhwujknvQ)3LGu>b0p zA`QXR(8ZBqWTHlBKC!sqq|Swrr`I339GS3 z2hme!qIqo-?)h#fsiRsD&Q4Ei?65vY<&84o<~Kyqq%o zy6HFz^~!41k9KRnpL>!B*!eOIJ(T-C@%`^VRK9JkYBV;X z(jckn55sYCkV%(H_iN~DzncfwD>1vPTkZqy3Q?@4S7aQ%f47acS z(mG(ymIR$%b{lrfVJ=22U{LDM7~oJ6L(Y$!*2C|;mi5#h7 zf@XqHGc~h_(zQD|A*_XXG}Z2!y+$PikM6`6DTRqNyy5JzZN`;y3NyianvJ(rg_lV) ze2G+c(VG?}JP-$8y@c@cdrk9!vk&64Y|Kr_3>3Ot#zY+{l<-N-bfXZMXA)8VpY7JL zuj0OpC3=0S%Tb3Tf)Ja|_3<{Rkbu{IA~Ng63xc`5Ne+^F#aPtjP=p1!avvSN^77;Q zHkgm;q-@1Z*KTpS(cjXWYcu-kz2QSzrG){vsgj-_Lqh-25y+-1M)R}QNVKoPeZ5)1 z4?9z6fLr19ys}v$^Kod8LoB&(aj^cGAV0qvk0K=N;X}*a z*~FULJOg&B2KVoLU}>1EXrHyTZ4UHZE*UL~irPUBLBi*@gfKVsxVMm}G#`CPyIR_0 zmhEsO81dVc3(svAnA#w!kb-i<=rOUg9G7*H>n+-f7=O|G06tM$tO2{`wr_0ameoAH zd0gg?8J5T@Wde@RKtO8+Mqu&$(UaaBS*1Ney1DY+ujSY3H8xsB7MI?^(3slNGEWGL z7nT)75cv{JNb!_clZ)_k01+!OQw9j*Ym1bor`{uHuQ>ke<+*z3^VD@O=ox<( zP9Ti4Kd4w44MWZRl_5469Q#1$W+kgPRyBY6t2)K=(5xnkarH%*ExjZOcm$_#n$LY; zh^G0*^=X|oUZqXTFLU}c5SsM9vHJ)kq6}a9+&N?a=uhSLwbgrzF@7ltLf9=su+yR^ zd~d=6+j!eT>g7$Z>-*ON{qth|QISnvkcPFc2B4&jxLEt9?L3?YHa&hGCnJ!w5GO>n zlwH-=gp6a?h7~Lg=;z3$+n1*jAQ*qlOPomk#>?n;TIBwiZ&VuBgnF zaLFj<$e_!0#ny!!j9Ko*GMRGgIlPIOx`FthJ4*6t(6W!4@p07L z1=o-iM~QvJEv2WhLR}l^RU@&+zMzF!*j^T}kq-Oql;{5R!tGhyLhq?u3%|6T4Me|x zY&cy%c#l<;jvC~$!AYyB6D9dU+~1TS%ZpF$zNcjxZcKzCK8!DGa;1tDQx!xeu2djw ztOl(X(0Xpq>qp4LX1P1Vy!CZwII){egF;?LfkfeKB4&1DxLv!M$fz+gHQBJeLk$0- zR-^GM0-Sz-vkero{~H@CwvQ3}9=@NM*))#aEP9`)_y7?fXeuB) zUqYmX34^gp>v2)Gyf^a}7YP~r?XC8)J9>FJxl4==`XqbJIJs_nFKmWB{W#23N&F+? 
[GIT binary patch: base85-encoded binary file data, not human-readable]
z2d~#L-7zr^o=|-&6RFww_<8il;1X)(L}D@ujBL0HFIO=2Rfo*0NHqe!$Z{U$n)BuA z1Xz)C5am{bQ}RE7|3KJeCIZKTy?}~Yp(-ZVQgK+(0m&2=?nh{Vwy-EQ3`|+@0=v*1 z&f1uUlq3$SZs?P9*P%d^tnjpSCsU(Y$}&YmYga@^Kyddf_Bld-XEg30`=UpmKkL;N zBVLG13cBnM6v6N6N9mUMh&Ow%Fn}=Y#aqRlzf(T@md0#AozXC{Ykh%(U&pK?km&7> zP7q#sifEvm0+Z`!6!7LqlXg0s?I4}y+aXUvM)v-Ng`Hf`rC6Go?-XfS{Gt6um@)WZ zs`QJNj<<#SIMfsd({kg^odu$S1|PAlHE~elJG^?PpmjGhd8=H_8@s;6g**DK~2~>?oU)5~fWM^kAg=8O-uFRKpUG5g^ zIOx?KiUOWN9NiZE!T3FP$g~LpkJuoB2o)vXrhO32w=aE({WvtqlHuP!P9#5e!VrFP z7+09She*gO1C6EvQWt2<7rh-zf=O-_$bF`b$F)T6#rn2BY%~$Xnb|59aR*i~dYpx@ zRR{rO8^wdgne$kIZQ}gkKzl_lV&y`SBaJ{ZbttCp`J* zUeRXZOZL%u434?oLj(wkJ?;Wz_Gd#KATFJv@y;XFrnKtz1?~w2Uomqc$*&X)j&P(D%ptH==*lW`8N(kMi^dri7?A6Ota^F|P%v z_1>C3u4L)?;NHOeP_gPgPgv_ZCXj>AtQ`2QSO44;!41dAm|O!#(YE3I!S=K(u0y$8M&Su|S)XoSmFu zhWDp%b>_08t_ZPZ#rGa|AcP@3k1`KEFLvRtIzAhiLET4OFrTgG5gg)!^{6rv&vg0n zL{3)$*`>AhGUt-HpNHu^QT_p9RNa$kAzYQX6u=kfTy4D|jl7(FOPIhAfL0Hl?p9|( zOC@ex1$F2Kowqgsuc07~2%VW9kyr@h7B*0Fn{y>{scs@0;o7l!JvTzx7kJV?exwM# z-lDwaOG#pxtnR(rGj0>;_hTT=Q6Xu;OzXglWKJ=iNRA+$sRM$|DpM{KI|S}SZk@YF zzDa~V9{j~ofurYy3PxVbTlu+*;3}Bs=hk1x@fy8HyDoh##hySxIrKo=fG^KVnYJ2`u)?q)O6aD-ItPzD8PJ_NclpyE zI19j?7lXxi~<9b7CiGq^3NzZLIIXwG63py*lhMyb4^ND zp+0*tz&?9s!}DUgH$~#yg?nlDWEa>uB$Td^Me@??ZRpX?$O=+ecdZ-ST8HIm4Llx@ zpEI6~t*6V@EIdS8pCJh&OdJR;9wf-5fu9&*(FcNT(5g}~-q~3khPmkpLb8pxnJ_9|^A_{`BLU3Y}Px3=biMC3~t^Znfapr_{rHRh(Dz-o5Ao5rz#T5dV z^Pn`eG?#IM;0j(^3>f4I=o26yAP68AVEzz;6zK3mS0!Ea{%tKMCkr8{3#$ zP#c*U-M&#af0j}o@F(to3-7^*0Zs`t+p3{P$nN3oeH>{3-I>@ zfX|;L-O~i*rVQ7TXwcD^Ti6(x8aZj%7@3>h1uTe+rj`Uy`~esGPa5De-BSYePN1#P zU6^G1NEc$jZM1-k=muu^Zj6?$o~5mpj-$pO?S0F_-VdtE0d|LE^be})2LR@pfp)3_ zWD5seH&w-abSLods$R==zp5Ya#yqU*b&tH?s;H0ekc{CURi!xcFnk42^UdE>J-HkB zcU7?=iy&SZ6h;XBQpaXb2DojD=i~4 z8*7@s|EJr%a2x|Ngz*aMuX85$p(D zz^&IV1^)(O=@!DmO3wm7(lNKP(lfc&tKjSR*iZmCWPt0YA%p;4{#TLy- zgx7|+77F0u{#R}iOCBWZndw;>INoly>v4R)jK>cU>1qCpjJkjhd7swwG)m{}&?xk;&9t_qJIc#w9$6 zvog2(C)~|`GxZ?O6ySjL%>IdZQ(qjon=SaOzVyGZujYfe-`4m2gSh{q@An6Y48N~0 zF(A46wFCazd<-GtYZw|k{N$C3*a-|GY635 z|JtK(+tkQR-^AQrTFb^p&&uq9UUPp9M1X+*n>F0oLigA3{`YIRvz34#h|F*St%3na z^H^`R5*EtCI_OyG=^EJp!iDW035WFqEp>k_QU9%$J6r4iS|CyXlP-6+7@(DYrabgF zTFm_)YO&V>bSMQa6I;Cp+v;Ba(C_l^Zl!zq1ZWSp(%k!wIbZ*FC+^Su9Iz~514xYi zwlr~_xGkJz8{q=5*Hdf~!1c4Fdy-=ORQOj3|3m_2d3JhM*4N7uw^mH9p(BJ0f;a$D z0AL{8R6Fz`sIHcc)?K)3nSKr_?4JSxKGQwdAHo^`6N~f)iwt0>KMZ__zJM96nf0GD zr&|hN_rhz?PYG`t#`1nDJ3~ors5&i1BSUC z&-WYV>V7K2e>98$z;oQwh75qA{oh7R&O^}OHw-vn0NpF&ZyHAMA=U$ixpomSZSO|o zfq{Td7=UlPh?~?u=M&ds@_`KEew-fe4Tb-PyE6#hkK@9B827iBcRLIK?*FT#dp-rs zZvQZcwYiCw)qjWs*Xy@GOS-4U-MoJY1QP&*{2B;uI>zz`nfLeWYwE9&;HKu706%$$ z=6_0hSI55QU2FQ&(OyTGpC#SX`9a>_diHJD^_thulI~gkAnbvzeLEQ3v<@iX*ZXw7 zKN>u29gzo_|J6E$4^n^MI&gs8;(pD4(>f9l!XB{B?TB#GIM(f+%{pQ$t{~+%-tuy~1>;das%lNC~y}46H1{k5g>inl8|DW5uCont= z1O+U0KMed^Xm}VnL-|j@zXgZu4*8W*@0bbTdYk|kUp?_d9+{wJQNF45&(K0~ROYz%lT&Q%n<-ECZ@u&0}27ouh z+~?CqeFWS)9|N|&2Ec#-Ym0xCbWd@>^!{cm+)w|1>mau$CD$5ULw~mP%{2o4$N#y_zny>FoR3xi#kU7+-~EU50mdZn{2fXL)%;TnE((b4J`^4j>VU{04 z{#V0@bKcvMcTYoa51*Tc$$bd-n}%6=i1mPBZo9pkhN0!USK)u3bKEqH?L)}_YM9A~ zpuaz2Sh*ii`!@|^_Yms=!`zPMw_LYr}++O7S|Y~v;__W2Ek|Aza=T;nFrjPGIG z-!hFrK$v+u%nRNJaALUtFLo0rB=8ewVq|9gS7rb>Ghm@-Wdqp$y`6JhPvLtI=?^Rc zn~8%g;6OKhn5^KPxc|igdcfHsD=kyK>kQ;}ta_LV`SN}$!#_}E1BZQnE;j%YqQ6SI zXQTF=)IYZe|JwPc*U&Yx($l%~aLBK4K*sS`N%zdu{g?3Tvorq&e^d4_{eOjL_)oIi z82(H6eX<7t`bd>fTvZdGLF9pNcR4k{w&Wk#(=T1_`kcmp>vC$wcjErn<^Fr;Kn5h` zcL;gAbNsgkK6U<=@cSD0c4WCZrWaiAz+WFSxvzn*{b5*XI^gw}fK!$!khfd}zS~cn zsgbn~72uc!&Gip}PpUuiyW4~LVVqCY!?-*B;r-GrWj~Djn?Jmk&Wrm?Wb2=)Jz)Rj zuafR5T>lgI$2Rz1=U?tp@_r0|+ufKumAfCq-Fr9YZ%Xw5loAFFm<|G{q6ZjpHy5?o 
zMt)+fZK$km%&oKx^k{BQ-01F{WDNqa(q`z-9Rb)fz;zP~HTsX(e>WeziS7Pp?EO>1 zUp?~8NgMLm9n#*MQ(4;p&N)d-@G4OmnY{+M{5v+eu6xg~b+G)^cdmD(fX08L4tpaT zLtQH^do7c@`qr=OAvb;h*2KT73>&?tpy+p`Hfm`TD*An-_%0?|7G!%`QNDHzgj%>>%Xhy9*ci@ z(BjsM{|^377Qe3Lev4}_{kvNJ$>I;!BD3-jwE(8PHve1nd3coHt_84G@>fase7^Qu zwcKy@+lINBLeg&h+iL!$-2qD)X5AE#goCNy0kd0%UxDm2^+meZY5d{yZ}R z*ta${)dL*nzwO$u5pU^aQjx%bfD9mkfYAN|T!%ofgI^GU98u3o%jAEn1h>|NuPMRW z-} Date: Wed, 17 Apr 2024 15:04:04 +0200 Subject: [PATCH 024/147] style(tablemode-ui): change API functions signature --- .../explore/TableModeList/utils.ts | 2 +- webapp/src/components/common/TableMode.tsx | 11 +++--- webapp/src/services/api/constants.ts | 4 -- .../services/api/studies/tableMode/index.ts | 37 ++++++++++--------- .../services/api/studies/tableMode/type.ts | 12 ------ .../services/api/studies/tableMode/types.ts | 26 +++++++++++++ .../services/api/studies/tableMode/utils.ts | 2 +- 7 files changed, 53 insertions(+), 41 deletions(-) delete mode 100644 webapp/src/services/api/constants.ts delete mode 100644 webapp/src/services/api/studies/tableMode/type.ts create mode 100644 webapp/src/services/api/studies/tableMode/types.ts diff --git a/webapp/src/components/App/Singlestudy/explore/TableModeList/utils.ts b/webapp/src/components/App/Singlestudy/explore/TableModeList/utils.ts index 0d8060c307..e09d068ab2 100644 --- a/webapp/src/components/App/Singlestudy/explore/TableModeList/utils.ts +++ b/webapp/src/components/App/Singlestudy/explore/TableModeList/utils.ts @@ -2,7 +2,7 @@ import { v4 as uuidv4 } from "uuid"; import { TableModeColumnsForType, TableModeType, -} from "../../../../../services/api/studies/tableMode/type"; +} from "../../../../../services/api/studies/tableMode/types"; import { TABLE_MODE_COLUMNS_BY_TYPE } from "../../../../../services/api/studies/tableMode/constants"; //////////////////////////////////////////////////////////////// diff --git a/webapp/src/components/common/TableMode.tsx b/webapp/src/components/common/TableMode.tsx index a96780f43d..e92ff18011 100644 --- a/webapp/src/components/common/TableMode.tsx +++ b/webapp/src/components/common/TableMode.tsx @@ -8,7 +8,7 @@ import { TableData, TableModeColumnsForType, TableModeType, -} from "../../services/api/studies/tableMode/type"; +} from "../../services/api/studies/tableMode/types"; import { SubmitHandlerPlus } from "./Form/types"; import TableForm from "./TableForm"; import UsePromiseCond from "./utils/UsePromiseCond"; @@ -22,16 +22,17 @@ export interface TableModeProps { function TableMode(props: TableModeProps) { const { studyId, type, columns } = props; - const res = usePromise(async () => { - return getTableMode(studyId, type, columns); - }, [studyId, type, JSON.stringify(columns)]); + const res = usePromise( + () => getTableMode({ studyId, type, columns }), + [studyId, type, JSON.stringify(columns)], + ); //////////////////////////////////////////////////////////////// // Event Handlers //////////////////////////////////////////////////////////////// const handleSubmit = (data: SubmitHandlerPlus) => { - return setTableMode(studyId, type, data.dirtyValues); + return setTableMode({ studyId, type, data: data.dirtyValues }); }; //////////////////////////////////////////////////////////////// diff --git a/webapp/src/services/api/constants.ts b/webapp/src/services/api/constants.ts deleted file mode 100644 index 421ab814ed..0000000000 --- a/webapp/src/services/api/constants.ts +++ /dev/null @@ -1,4 +0,0 @@ -const API_URL_BASE = "v1"; -const STUDIES_API_URL = `${API_URL_BASE}/studies/{studyId}`; - -export const 
TABLE_MODE_API_URL = `${STUDIES_API_URL}/tablemode`; diff --git a/webapp/src/services/api/studies/tableMode/index.ts b/webapp/src/services/api/studies/tableMode/index.ts index c6c4db5b80..68cd8cc895 100644 --- a/webapp/src/services/api/studies/tableMode/index.ts +++ b/webapp/src/services/api/studies/tableMode/index.ts @@ -1,35 +1,36 @@ -import { DeepPartial } from "react-hook-form"; -import { StudyMetadata } from "../../../../common/types"; import client from "../../client"; import { format } from "../../../../utils/stringUtils"; -import { TABLE_MODE_API_URL } from "../../constants"; -import type { TableData, TableModeColumnsForType, TableModeType } from "./type"; +import type { + GetTableModeParams, + SetTableModeParams, + TableData, + TableModeType, +} from "./types"; import { toColumnApiName } from "./utils"; +const TABLE_MODE_API_URL = `v1/studies/{studyId}/tablemode`; + export async function getTableMode( - studyId: StudyMetadata["id"], - type: T, - columns: TableModeColumnsForType, -): Promise { + params: GetTableModeParams, +) { + const { studyId, type, columns } = params; const url = format(TABLE_MODE_API_URL, { studyId }); - const res = await client.get(url, { + + const res = await client.get(url, { params: { table_type: type, columns: columns.map(toColumnApiName).join(","), }, }); + return res.data; } -export function setTableMode( - studyId: StudyMetadata["id"], - type: TableModeType, - data: DeepPartial, -): Promise { +export async function setTableMode(params: SetTableModeParams) { + const { studyId, type, data } = params; const url = format(TABLE_MODE_API_URL, { studyId }); - return client.put(url, data, { - params: { - table_type: type, - }, + + await client.put(url, data, { + params: { table_type: type }, }); } diff --git a/webapp/src/services/api/studies/tableMode/type.ts b/webapp/src/services/api/studies/tableMode/type.ts deleted file mode 100644 index 71b751d875..0000000000 --- a/webapp/src/services/api/studies/tableMode/type.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { TABLE_MODE_COLUMNS_BY_TYPE, TABLE_MODE_TYPES } from "./constants"; - -export type TableModeType = (typeof TABLE_MODE_TYPES)[number]; - -export type TableModeColumnsForType = Array< - (typeof TABLE_MODE_COLUMNS_BY_TYPE)[T][number] ->; - -export type TableData = Record< - string, - Record ->; diff --git a/webapp/src/services/api/studies/tableMode/types.ts b/webapp/src/services/api/studies/tableMode/types.ts new file mode 100644 index 0000000000..def8344b9e --- /dev/null +++ b/webapp/src/services/api/studies/tableMode/types.ts @@ -0,0 +1,26 @@ +import { DeepPartial } from "react-hook-form"; +import type { StudyMetadata } from "../../../../common/types"; +import { TABLE_MODE_COLUMNS_BY_TYPE, TABLE_MODE_TYPES } from "./constants"; + +export type TableModeType = (typeof TABLE_MODE_TYPES)[number]; + +export type TableModeColumnsForType = Array< + (typeof TABLE_MODE_COLUMNS_BY_TYPE)[T][number] +>; + +export type TableData = Record< + string, + Record +>; + +export interface GetTableModeParams { + studyId: StudyMetadata["id"]; + type: T; + columns: TableModeColumnsForType; +} + +export interface SetTableModeParams { + studyId: StudyMetadata["id"]; + type: TableModeType; + data: DeepPartial; +} diff --git a/webapp/src/services/api/studies/tableMode/utils.ts b/webapp/src/services/api/studies/tableMode/utils.ts index e856785502..35ccd7c8a3 100644 --- a/webapp/src/services/api/studies/tableMode/utils.ts +++ b/webapp/src/services/api/studies/tableMode/utils.ts @@ -1,5 +1,5 @@ import { snakeCase } from "lodash"; -import 
{ TableModeColumnsForType, TableModeType } from "./type"; +import { TableModeColumnsForType, TableModeType } from "./types"; export function toColumnApiName( column: TableModeColumnsForType[number], From ff241550bbe36eb35934350a1d58f8aa0a0a69d1 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Wed, 17 Apr 2024 15:31:25 +0200 Subject: [PATCH 025/147] feat(raw-ui): add download matrix API function --- webapp/src/services/api/studies/raw/index.ts | 13 +++++++++++++ webapp/src/services/api/studies/raw/types.ts | 9 +++++++++ 2 files changed, 22 insertions(+) create mode 100644 webapp/src/services/api/studies/raw/index.ts create mode 100644 webapp/src/services/api/studies/raw/types.ts diff --git a/webapp/src/services/api/studies/raw/index.ts b/webapp/src/services/api/studies/raw/index.ts new file mode 100644 index 0000000000..63e46dc31a --- /dev/null +++ b/webapp/src/services/api/studies/raw/index.ts @@ -0,0 +1,13 @@ +import client from "../../client"; +import type { DownloadMatrixParams } from "./types"; + +export async function downloadMatrix(params: DownloadMatrixParams) { + const { studyId, ...rest } = params; + const url = `v1/studies/${studyId}/raw/download`; + const res = await client.get(url, { + params: rest, + responseType: "blob", + }); + + return res.data; +} diff --git a/webapp/src/services/api/studies/raw/types.ts b/webapp/src/services/api/studies/raw/types.ts new file mode 100644 index 0000000000..e524fbdc72 --- /dev/null +++ b/webapp/src/services/api/studies/raw/types.ts @@ -0,0 +1,9 @@ +import type { StudyMetadata } from "../../../../common/types"; + +export interface DownloadMatrixParams { + studyId: StudyMetadata["id"]; + path: string; + format?: "tsv" | "xlsx"; + header?: boolean; + index?: boolean; +} From c82e388db222365c771b9ea6f00417866f771e4b Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Tue, 2 Apr 2024 16:47:44 +0200 Subject: [PATCH 026/147] feat(utils-ui): create fileUtils --- webapp/src/utils/fileUtils.ts | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 webapp/src/utils/fileUtils.ts diff --git a/webapp/src/utils/fileUtils.ts b/webapp/src/utils/fileUtils.ts new file mode 100644 index 0000000000..0233d6f984 --- /dev/null +++ b/webapp/src/utils/fileUtils.ts @@ -0,0 +1,13 @@ +/** + * Triggers the download of a file with the given data and name. + * + * @param fileData - The data of the file to be downloaded. + * @param fileName - The name of the file to be downloaded. 
+ */ +export function downloadFile(fileData: BlobPart, fileName: string) { + const link = document.createElement("a"); + link.href = URL.createObjectURL(new Blob([fileData])); + link.download = fileName; + link.click(); + URL.revokeObjectURL(link.href); +} From a650ebbacab84c167b4742a13387baa304c72a4d Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Wed, 17 Apr 2024 15:24:54 +0200 Subject: [PATCH 027/147] feat(common-ui): create SplitButton component --- .../components/common/buttons/SplitButton.tsx | 144 ++++++++++++++++++ 1 file changed, 144 insertions(+) create mode 100644 webapp/src/components/common/buttons/SplitButton.tsx diff --git a/webapp/src/components/common/buttons/SplitButton.tsx b/webapp/src/components/common/buttons/SplitButton.tsx new file mode 100644 index 0000000000..684e74f212 --- /dev/null +++ b/webapp/src/components/common/buttons/SplitButton.tsx @@ -0,0 +1,144 @@ +import Button from "@mui/material/Button"; +import ButtonGroup, { ButtonGroupProps } from "@mui/material/ButtonGroup"; +import ArrowDropDownIcon from "@mui/icons-material/ArrowDropDown"; +import ClickAwayListener from "@mui/material/ClickAwayListener"; +import Grow from "@mui/material/Grow"; +import Paper from "@mui/material/Paper"; +import Popper from "@mui/material/Popper"; +import MenuItem from "@mui/material/MenuItem"; +import MenuList from "@mui/material/MenuList"; +import { useRef, useState } from "react"; +import LoadingButton, { type LoadingButtonProps } from "@mui/lab/LoadingButton"; + +interface OptionObj { + value: Value; + label?: string; + disabled?: boolean; +} + +export interface SplitButtonProps + extends Omit { + options: Array>; + dropdownActionMode?: "change" | "trigger"; + onClick?: (optionValue: OptionValue, optionIndex: number) => void; + ButtonProps?: Omit; +} + +export default function SplitButton( + props: SplitButtonProps, +) { + const { + options, + dropdownActionMode = "trigger", + onClick, + children, + ButtonProps: loadingButtonProps, + ...buttonGroupProps + } = props; + const [open, setOpen] = useState(false); + const [selectedIndex, setSelectedIndex] = useState(0); + const anchorRef = useRef(null); + const isChangeMode = dropdownActionMode === "change"; + const isTriggerMode = dropdownActionMode === "trigger"; + const formattedOptions = options.map((option) => + typeof option === "string" ? { value: option } : option, + ); + + //////////////////////////////////////////////////////////////// + // Event Handlers + //////////////////////////////////////////////////////////////// + + const handleButtonClick = () => { + if (onClick && formattedOptions.length > 0) { + const index = isChangeMode ? selectedIndex : 0; + onClick(formattedOptions[index].value, index); + } + }; + + const handleMenuItemClick = (index: number) => { + setSelectedIndex(index); + setOpen(false); + + if (isTriggerMode) { + onClick?.(formattedOptions[index].value, index); + } + }; + + const handleToggle = () => { + setOpen((prevOpen) => !prevOpen); + }; + + const handleClose = (event: Event) => { + if (anchorRef.current?.contains(event.target as HTMLElement)) { + return; + } + + setOpen(false); + }; + + //////////////////////////////////////////////////////////////// + // Utils + //////////////////////////////////////////////////////////////// + + const getDropdownLabel = (index: number) => { + const { value, label } = formattedOptions[index] || {}; + return label ?? value; + }; + + const getButtonLabel = (index: number) => { + return isChangeMode ? 
getDropdownLabel(index) : children; + }; + + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + + return ( + <> + + + {getButtonLabel(selectedIndex)} + + + + + {({ TransitionProps }) => ( + + + + + {formattedOptions.map((option, index) => ( + handleMenuItemClick(index)} + > + {getDropdownLabel(index)} + + ))} + + + + + )} + + + ); +} From 1736dfa48fd204240951973b5580fe6ff997add3 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Wed, 17 Apr 2024 15:34:19 +0200 Subject: [PATCH 028/147] feat(common-ui): create DownloadMatrixButton component --- webapp/public/locales/en/main.json | 1 + webapp/public/locales/fr/main.json | 1 + .../common/DownloadMatrixButton.tsx | 85 +++++++++++++++++++ 3 files changed, 87 insertions(+) create mode 100644 webapp/src/components/common/DownloadMatrixButton.tsx diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 4ce793b79d..4fc695f630 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -48,6 +48,7 @@ "global.emptyString": "Empty string", "global.edit": "Edit", "global.download": "Download", + "global.download.error": "Download failed", "global.generate": "Generate", "global.user": "User", "global.users": "Users", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index 89afd931dc..ea8b1b1dd5 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -48,6 +48,7 @@ "global.emptyString": "Chaine de caractères vide", "global.edit": "Editer", "global.download": "Télécharger", + "global.download.error": "Le téléchargement a échoué", "global.generate": "Générer", "global.user": "Utilisateur", "global.users": "Utilisateurs", diff --git a/webapp/src/components/common/DownloadMatrixButton.tsx b/webapp/src/components/common/DownloadMatrixButton.tsx new file mode 100644 index 0000000000..49551eb0f7 --- /dev/null +++ b/webapp/src/components/common/DownloadMatrixButton.tsx @@ -0,0 +1,85 @@ +import FileUploadIcon from "@mui/icons-material/FileUpload"; +import SplitButton from "./buttons/SplitButton.tsx"; +import { downloadMatrix } from "../../services/api/studies/raw/index.ts"; +import { downloadFile } from "../../utils/fileUtils.ts"; +import { useState } from "react"; +import { StudyMetadata } from "../../common/types.ts"; +import useEnqueueErrorSnackbar from "../../hooks/useEnqueueErrorSnackbar.tsx"; +import { useTranslation } from "react-i18next"; + +export interface DownloadMatrixButtonProps { + studyId: StudyMetadata["id"]; + path?: string; + disabled?: boolean; + label?: string; +} + +const EXPORT_OPTIONS = [ + { label: "TSV", value: "tsv" }, + { label: "Excel", value: "xlsx" }, +] as const; + +type ExportFormat = (typeof EXPORT_OPTIONS)[number]["value"]; + +function DownloadMatrixButton(props: DownloadMatrixButtonProps) { + const { t } = useTranslation(); + const { studyId, path, disabled, label = t("global.export") } = props; + const [isDownloading, setIsDownloading] = useState(false); + const enqueueErrorSnackbar = useEnqueueErrorSnackbar(); + + //////////////////////////////////////////////////////////////// + // Event Handlers + //////////////////////////////////////////////////////////////// + + const handleDownload = async (format: ExportFormat) => { + if (!path) { + return; + } + + setIsDownloading(true); + + const isExcel = format === "xlsx"; + + try { + const res = await downloadMatrix({ 
+ studyId, + path, + format, + header: isExcel, + index: isExcel, + }); + + downloadFile( + res, + `matrix_${studyId}_${path.replace("/", "_")}.${format}`, + ); + } catch (err) { + enqueueErrorSnackbar(t("global.download.error"), String(err)); + } + + setIsDownloading(false); + }; + + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + + return ( + , + loadingPosition: "start", + loading: isDownloading, + }} + > + {label} + + ); +} + +export default DownloadMatrixButton; From 8d3d29e09e08e55d926d6eafe9a2043c44aa8a76 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Wed, 17 Apr 2024 15:35:10 +0200 Subject: [PATCH 029/147] feat(results-ui): update download button --- .../explore/Results/ResultDetails/index.tsx | 56 +++++++------------ 1 file changed, 21 insertions(+), 35 deletions(-) diff --git a/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx b/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx index 2675d7d424..8f4d0ce2f6 100644 --- a/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx @@ -1,6 +1,5 @@ import { Box, - Button, Paper, Skeleton, ToggleButton, @@ -12,7 +11,6 @@ import { useTranslation } from "react-i18next"; import { useNavigate, useOutletContext, useParams } from "react-router"; import axios from "axios"; import GridOffIcon from "@mui/icons-material/GridOff"; -import DownloadOutlinedIcon from "@mui/icons-material/DownloadOutlined"; import { Area, LinkElement, @@ -44,12 +42,12 @@ import UsePromiseCond, { mergeResponses, } from "../../../../../common/utils/UsePromiseCond"; import useStudySynthesis from "../../../../../../redux/hooks/useStudySynthesis"; -import { downloadMatrix } from "../../../../../../utils/matrixUtils"; import ButtonBack from "../../../../../common/ButtonBack"; import BooleanFE from "../../../../../common/fieldEditors/BooleanFE"; import SelectFE from "../../../../../common/fieldEditors/SelectFE"; import NumberFE from "../../../../../common/fieldEditors/NumberFE"; import moment from "moment"; +import DownloadMatrixButton from "../../../../../common/DownloadMatrixButton.tsx"; function ResultDetails() { const { study } = useOutletContext<{ study: StudyMetadata }>(); @@ -105,17 +103,22 @@ function ResultDetails() { [filteredItems], ); + const path = useMemo(() => { + if (output && selectedItem && !isSynthesis) { + return createPath({ + output, + item: selectedItem, + dataType, + timestep, + year, + }); + } + return ""; + }, [output, selectedItem, isSynthesis, dataType, timestep, year]); + const matrixRes = usePromise( async () => { - if (output && selectedItem && !isSynthesis) { - const path = createPath({ - output, - item: selectedItem, - dataType, - timestep, - year, - }); - + if (path) { const res = await getStudyData(study.id, path); if (typeof res === "string") { const fixed = res @@ -131,18 +134,17 @@ function ResultDetails() { { resetDataOnReload: true, resetErrorOnReload: true, - deps: [study.id, output, selectedItem, dataType, timestep, year], + deps: [study.id, path], }, ); - const { data: synthesis } = usePromise( - async () => { + const { data: synthesis } = usePromise( + () => { if (outputId && selectedItem && isSynthesis) { const path = `output/${outputId}/economy/mc-all/grid/${selectedItem.id}`; - const res = await getStudyData(study.id, path); - return 
res; + return getStudyData(study.id, path); } - return null; + return Promise.resolve(null); }, { deps: [study.id, outputId, selectedItem], @@ -203,10 +205,6 @@ function ResultDetails() { } }; - const handleDownload = (matrixData: MatrixType, fileName: string): void => { - downloadMatrix(matrixData, fileName); - }; - //////////////////////////////////////////////////////////////// // JSX //////////////////////////////////////////////////////////////// @@ -385,19 +383,7 @@ function ResultDetails() { ))} - + Date: Wed, 17 Apr 2024 15:35:16 +0200 Subject: [PATCH 030/147] feat(common-ui): update matrix buttons in MatrixInput --- webapp/public/locales/en/main.json | 2 + webapp/public/locales/fr/main.json | 2 + .../components/common/MatrixInput/index.tsx | 103 +++++------------- .../components/common/MatrixInput/style.ts | 3 +- webapp/src/utils/matrixUtils.ts | 11 -- 5 files changed, 35 insertions(+), 86 deletions(-) delete mode 100644 webapp/src/utils/matrixUtils.ts diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 4fc695f630..0205b8a0fd 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -32,6 +32,8 @@ "global.open": "Open", "global.name": "Name", "global.import": "Import", + "global.import.fromFile": "From a file", + "global.import.fromDatabase": "From database", "global.importHint": "Click or drag and drop here", "global.launch": "Launch", "global.jobs": "Jobs", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index ea8b1b1dd5..86768663c2 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -32,6 +32,8 @@ "global.open": "Ouvrir", "global.name": "Nom", "global.import": "Importer", + "global.import.fromFile": "Depuis un fichier", + "global.import.fromDatabase": "Depuis la base de donnée", "global.importHint": "Cliquer ou glisser ici", "global.launch": "Lancer", "global.jobs": "Tâches", diff --git a/webapp/src/components/common/MatrixInput/index.tsx b/webapp/src/components/common/MatrixInput/index.tsx index 1bc3d00f33..fdeb92749f 100644 --- a/webapp/src/components/common/MatrixInput/index.tsx +++ b/webapp/src/components/common/MatrixInput/index.tsx @@ -3,14 +3,11 @@ import { useSnackbar } from "notistack"; import { useState } from "react"; import { AxiosError } from "axios"; import debug from "debug"; -import { Typography, Box, Button, Divider, Tooltip } from "@mui/material"; -import UploadOutlinedIcon from "@mui/icons-material/UploadOutlined"; -import DownloadOutlinedIcon from "@mui/icons-material/DownloadOutlined"; -import InventoryIcon from "@mui/icons-material/Inventory"; +import { Typography, Box, Divider } from "@mui/material"; +import FileDownloadIcon from "@mui/icons-material/FileDownload"; import { MatrixEditDTO, MatrixStats, - MatrixType, StudyMetadata, } from "../../../common/types"; import useEnqueueErrorSnackbar from "../../../hooks/useEnqueueErrorSnackbar"; @@ -23,9 +20,9 @@ import SimpleContent from "../page/SimpleContent"; import EditableMatrix from "../EditableMatrix"; import ImportDialog from "../dialogs/ImportDialog"; import MatrixAssignDialog from "./MatrixAssignDialog"; -import { downloadMatrix } from "../../../utils/matrixUtils"; import { fetchMatrixFn } from "../../App/Singlestudy/explore/Modelization/Areas/Hydro/utils"; -import { LoadingButton } from "@mui/lab"; +import SplitButton from "../buttons/SplitButton"; +import DownloadMatrixButton from "../DownloadMatrixButton.tsx"; const logErr = 
debug("antares:createimportform:error"); @@ -57,7 +54,6 @@ function MatrixInput({ const [t] = useTranslation(); const [openImportDialog, setOpenImportDialog] = useState(false); const [openMatrixAsignDialog, setOpenMatrixAsignDialog] = useState(false); - const [isDownloading, setIsDownloading] = useState(false); const { data: matrixData, @@ -86,6 +82,7 @@ function MatrixInput({ * Otherwise, default row numbers and timestamps are displayed using initialRowNames. */ const rowNames = fetchFn ? matrixIndex : initialRowNames; + const columnsLength = matrixData?.columns?.length ?? 0; //////////////////////////////////////////////////////////////// // Utils @@ -138,16 +135,6 @@ function MatrixInput({ } }; - const handleDownload = async (matrixData: MatrixType, fileName: string) => { - setIsDownloading(true); - - // Re-fetch to get latest data - const data = await fetchMatrixData(); - downloadMatrix(data, fileName); - - setIsDownloading(false); - }; - //////////////////////////////////////////////////////////////// // JSX //////////////////////////////////////////////////////////////// @@ -166,54 +153,36 @@ function MatrixInput({ > {title || t("xpansion.timeSeries")} - - - - - - {matrixData?.columns?.length >= 1 && ( - } - onClick={() => - handleDownload( - matrixData, - `matrix_${study.id}_${url.replace("/", "_")}`, - ) - } - > - {t("global.download")} - - )} + + {isLoading && } - {!isLoading && matrixData?.columns?.length >= 1 && matrixIndex ? ( + {!isLoading && columnsLength >= 1 && matrixIndex ? ( ) : ( - !isLoading && ( - } - onClick={() => setOpenImportDialog(true)} - > - {t("global.import")} - - } - /> - ) + !isLoading && )} {openImportDialog && ( diff --git a/webapp/src/components/common/MatrixInput/style.ts b/webapp/src/components/common/MatrixInput/style.ts index 6fd49fd167..72b77419cb 100644 --- a/webapp/src/components/common/MatrixInput/style.ts +++ b/webapp/src/components/common/MatrixInput/style.ts @@ -13,7 +13,8 @@ export const Root = styled(Box)(({ theme }) => ({ export const Header = styled(Box)(({ theme }) => ({ width: "100%", display: "flex", - flexFlow: "row nowrap", + flexFlow: "row wrap", + gap: theme.spacing(1), justifyContent: "space-between", alignItems: "flex-end", })); diff --git a/webapp/src/utils/matrixUtils.ts b/webapp/src/utils/matrixUtils.ts deleted file mode 100644 index c4380f8bcb..0000000000 --- a/webapp/src/utils/matrixUtils.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { MatrixType } from "../common/types"; - -export function downloadMatrix(matrixData: MatrixType, fileName: string): void { - const fileData = matrixData.data.map((row) => row.join("\t")).join("\n"); - const blob = new Blob([fileData], { type: "text/plain" }); - const a = document.createElement("a"); - a.download = fileName; - a.href = URL.createObjectURL(blob); - a.click(); - URL.revokeObjectURL(a.href); -} From e524b13d61bea1dc6fc2437ee5f1a8a33d9af733 Mon Sep 17 00:00:00 2001 From: MartinBelthle Date: Thu, 18 Apr 2024 10:59:50 +0200 Subject: [PATCH 031/147] fix(bc): better handling of default values (#2004) --- antarest/study/business/all_optional_meta.py | 94 ++++++ .../business/areas/renewable_management.py | 3 +- .../business/areas/st_storage_management.py | 3 +- .../business/areas/thermal_management.py | 3 +- .../business/binding_constraint_management.py | 185 +++++------ .../business/thematic_trimming_field_infos.py | 3 +- antarest/study/business/utils.py | 89 ------ .../study/business/xpansion_management.py | 2 +- .../business/command_extractor.py | 128 ++++---- 
.../variantstudy/business/command_reverter.py | 117 +++---- .../business/utils_binding_constraint.py | 85 ----- .../command/create_binding_constraint.py | 293 ++++++++++++------ .../variantstudy/model/command/icommand.py | 8 +- .../command/update_binding_constraint.py | 121 ++++---- .../storage/variantstudy/model/interfaces.py | 2 +- .../variantstudy/variant_command_generator.py | 5 + antarest/study/web/study_data_blueprint.py | 15 +- antarest/tools/lib.py | 20 +- tests/integration/assets/base_study.zip | Bin 305897 -> 305876 bytes tests/integration/assets/variant_study.zip | Bin 311302 -> 311282 bytes .../test_binding_constraints.py | 89 ++++++ tests/integration/test_integration.py | 10 +- .../test_integration_variantmanager_tool.py | 29 +- .../business/test_all_optional_metaclass.py | 2 +- .../test_manage_binding_constraints.py | 35 ++- tests/variantstudy/test_command_factory.py | 25 +- 26 files changed, 733 insertions(+), 633 deletions(-) create mode 100644 antarest/study/business/all_optional_meta.py diff --git a/antarest/study/business/all_optional_meta.py b/antarest/study/business/all_optional_meta.py new file mode 100644 index 0000000000..06ddc012d8 --- /dev/null +++ b/antarest/study/business/all_optional_meta.py @@ -0,0 +1,94 @@ +import typing as t + +import pydantic.fields +import pydantic.main +from pydantic import BaseModel + +from antarest.core.utils.string import to_camel_case + + +class AllOptionalMetaclass(pydantic.main.ModelMetaclass): + """ + Metaclass that makes all fields of a Pydantic model optional. + + Usage: + class MyModel(BaseModel, metaclass=AllOptionalMetaclass): + field1: str + field2: int + ... + + Instances of the model can be created even if not all fields are provided during initialization. + Default values, when provided, are used unless `use_none` is set to `True`. + """ + + def __new__( + cls: t.Type["AllOptionalMetaclass"], + name: str, + bases: t.Tuple[t.Type[t.Any], ...], + namespaces: t.Dict[str, t.Any], + use_none: bool = False, + **kwargs: t.Dict[str, t.Any], + ) -> t.Any: + """ + Create a new instance of the metaclass. + + Args: + name: Name of the class to create. + bases: Base classes of the class to create (a Pydantic model). + namespaces: namespace of the class to create that defines the fields of the model. + use_none: If `True`, the default value of the fields is set to `None`. + Note that this field is not part of the Pydantic model, but it is an extension. + **kwargs: Additional keyword arguments used by the metaclass. + """ + # Modify the annotations of the class (but not of the ancestor classes) + # in order to make all fields optional. + # If the current model inherits from another model, the annotations of the ancestor models + # are not modified, because the fields are already converted to `ModelField`. + annotations = namespaces.get("__annotations__", {}) + for field_name, field_type in annotations.items(): + if not field_name.startswith("__"): + # Making already optional fields optional is not a problem (nothing is changed). + annotations[field_name] = t.Optional[field_type] + namespaces["__annotations__"] = annotations + + if use_none: + # Modify the namespace fields to set their default value to `None`. + for field_name, field_info in namespaces.items(): + if isinstance(field_info, pydantic.fields.FieldInfo): + field_info.default = None + field_info.default_factory = None + + # Create the class: all annotations are converted into `ModelField`. 
+ instance = super().__new__(cls, name, bases, namespaces, **kwargs) + + # Modify the inherited fields of the class to make them optional + # and set their default value to `None`. + model_field: pydantic.fields.ModelField + for field_name, model_field in instance.__fields__.items(): + model_field.required = False + model_field.allow_none = True + if use_none: + model_field.default = None + model_field.default_factory = None + model_field.field_info.default = None + + return instance + + +MODEL = t.TypeVar("MODEL", bound=t.Type[BaseModel]) + + +def camel_case_model(model: MODEL) -> MODEL: + """ + This decorator can be used to modify a model to use camel case aliases. + + Args: + model: The pydantic model to modify. + + Returns: + The modified model. + """ + model.__config__.alias_generator = to_camel_case + for field_name, field in model.__fields__.items(): + field.alias = to_camel_case(field_name) + return model diff --git a/antarest/study/business/areas/renewable_management.py b/antarest/study/business/areas/renewable_management.py index 7858409f17..84f6e56672 100644 --- a/antarest/study/business/areas/renewable_management.py +++ b/antarest/study/business/areas/renewable_management.py @@ -4,8 +4,9 @@ from pydantic import validator from antarest.core.exceptions import DuplicateRenewableCluster, RenewableClusterConfigNotFound, RenewableClusterNotFound +from antarest.study.business.all_optional_meta import AllOptionalMetaclass, camel_case_model from antarest.study.business.enum_ignore_case import EnumIgnoreCase -from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands +from antarest.study.business.utils import execute_or_add_commands from antarest.study.model import Study from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.config.renewable import ( diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py index 85ceb41f1c..73f03b8ec3 100644 --- a/antarest/study/business/areas/st_storage_management.py +++ b/antarest/study/business/areas/st_storage_management.py @@ -15,7 +15,8 @@ STStorageMatrixNotFound, STStorageNotFound, ) -from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands +from antarest.study.business.all_optional_meta import AllOptionalMetaclass, camel_case_model +from antarest.study.business.utils import execute_or_add_commands from antarest.study.model import Study from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import ( diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index 88281a7be5..d5520c0d43 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -11,7 +11,8 @@ ThermalClusterNotFound, WrongMatrixHeightError, ) -from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands +from antarest.study.business.all_optional_meta import AllOptionalMetaclass, camel_case_model +from antarest.study.business.utils import execute_or_add_commands from antarest.study.model import Study from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from 
antarest.study.storage.rawstudy.model.filesystem.config.thermal import ( diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py index 8c44784920..010e1424d0 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -1,5 +1,6 @@ import collections import itertools +import json import logging from typing import Any, Dict, List, Mapping, MutableSequence, Optional, Sequence, Tuple, Union @@ -20,7 +21,8 @@ WrongMatrixHeightError, ) from antarest.core.utils.string import to_camel_case -from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands +from antarest.study.business.all_optional_meta import camel_case_model +from antarest.study.business.utils import execute_or_add_commands from antarest.study.model import Study from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id @@ -40,11 +42,13 @@ ) from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator from antarest.study.storage.variantstudy.model.command.create_binding_constraint import ( + DEFAULT_GROUP, EXPECTED_MATRIX_SHAPES, + TERM_MATRICES, BindingConstraintMatrices, - BindingConstraintProperties, - BindingConstraintProperties870, + BindingConstraintPropertiesBase, CreateBindingConstraint, + OptionalProperties, ) from antarest.study.storage.variantstudy.model.command.remove_binding_constraint import RemoveBindingConstraint from antarest.study.storage.variantstudy.model.command.update_binding_constraint import UpdateBindingConstraint @@ -52,9 +56,6 @@ logger = logging.getLogger(__name__) -DEFAULT_GROUP = "default" -"""Default group name for binding constraints if missing or empty.""" - class LinkTerm(BaseModel): """ @@ -226,7 +227,7 @@ def match_filters(self, constraint: "ConstraintOutput") -> bool: @camel_case_model -class ConstraintInput870(BindingConstraintProperties870, metaclass=AllOptionalMetaclass, use_none=True): +class ConstraintInput870(OptionalProperties): pass @@ -243,7 +244,7 @@ class ConstraintCreation(ConstraintInput): @root_validator(pre=True) def check_matrices_dimensions(cls, values: Dict[str, Any]) -> Dict[str, Any]: - for _key in ["time_step", "less_term_matrix", "equal_term_matrix", "greater_term_matrix"]: + for _key in ["time_step"] + TERM_MATRICES: _camel = to_camel_case(_key) values[_key] = values.pop(_camel, values.get(_key)) @@ -261,7 +262,7 @@ def check_matrices_dimensions(cls, values: Dict[str, Any]) -> Dict[str, Any]: # Collect the matrix shapes matrix_shapes = {} - for _field_name in ["values", "less_term_matrix", "equal_term_matrix", "greater_term_matrix"]: + for _field_name in ["values"] + TERM_MATRICES: if _matrix := values.get(_field_name): _array = np.array(_matrix) # We only store the shape if the array is not empty @@ -293,20 +294,26 @@ def check_matrices_dimensions(cls, values: Dict[str, Any]) -> Dict[str, Any]: @camel_case_model -class ConstraintOutputBase(BindingConstraintProperties): +class ConstraintOutputBase(BindingConstraintPropertiesBase): id: str name: str - terms: MutableSequence[ConstraintTerm] = Field( - default_factory=lambda: [], - ) + terms: MutableSequence[ConstraintTerm] = Field(default_factory=lambda: []) @camel_case_model -class ConstraintOutput870(ConstraintOutputBase): +class 
ConstraintOutput830(ConstraintOutputBase): + filter_year_by_year: str = "" + filter_synthesis: str = "" + + +@camel_case_model +class ConstraintOutput870(ConstraintOutput830): group: str = DEFAULT_GROUP -ConstraintOutput = Union[ConstraintOutputBase, ConstraintOutput870] +# WARNING: Do not change the order of the following line, it is used to determine +# the type of the output constraint in the FastAPI endpoint. +ConstraintOutput = Union[ConstraintOutputBase, ConstraintOutput830, ConstraintOutput870] def _get_references_by_widths( @@ -387,9 +394,7 @@ def __init__( self.storage_service = storage_service @staticmethod - def parse_and_add_terms( - key: str, value: Any, adapted_constraint: Union[ConstraintOutputBase, ConstraintOutput870] - ) -> None: + def parse_and_add_terms(key: str, value: Any, adapted_constraint: ConstraintOutput) -> None: """Parse a single term from the constraint dictionary and add it to the adapted_constraint model.""" if "%" in key or "." in key: separator = "%" if "%" in key else "." @@ -425,24 +430,24 @@ def parse_and_add_terms( @staticmethod def constraint_model_adapter(constraint: Mapping[str, Any], version: int) -> ConstraintOutput: """ - Adapts a constraint configuration to the appropriate version-specific format. + Adapts a binding constraint configuration to the appropriate model version. - Parameters: - - constraint: A dictionary or model representing the constraint to be adapted. - This can either be a dictionary coming from client input or an existing - model that needs reformatting. - - version: An integer indicating the target version of the study configuration. This is used to + Args: + constraint: A dictionary or model representing the constraint to be adapted. + This can either be a dictionary coming from client input or an existing + model that needs reformatting. + version: An integer indicating the target version of the study configuration. This is used to determine which model class to instantiate and which default values to apply. Returns: - - A new instance of either `ConstraintOutputBase` or `ConstraintOutput870`, - populated with the adapted values from the input constraint, and conforming to the - structure expected by the specified version. + A new instance of either `ConstraintOutputBase`, `ConstraintOutput830`, or `ConstraintOutput870`, + populated with the adapted values from the input constraint, and conforming to the + structure expected by the specified version. Note: - This method is crucial for ensuring backward compatibility and future-proofing the application - as it evolves. It allows client-side data to be accurately represented within the config and - ensures data integrity when storing or retrieving constraint configurations from the database. + This method is crucial for ensuring backward compatibility and future-proofing the application + as it evolves. It allows client-side data to be accurately represented within the config and + ensures data integrity when storing or retrieving constraint configurations from the database. """ constraint_output = { @@ -455,19 +460,20 @@ def constraint_model_adapter(constraint: Mapping[str, Any], version: int) -> Con "terms": constraint.get("terms", []), } - # TODO: Implement a model for version-specific fields. Output filters are sent regardless of the version. 
- if version >= 840: - constraint_output["filter_year_by_year"] = constraint.get("filter_year_by_year") or constraint.get( - "filter-year-by-year", "" - ) - constraint_output["filter_synthesis"] = constraint.get("filter_synthesis") or constraint.get( - "filter-synthesis", "" - ) - - adapted_constraint: Union[ConstraintOutputBase, ConstraintOutput870] + if version >= 830: + _filter_year_by_year = constraint.get("filter_year_by_year") or constraint.get("filter-year-by-year", "") + _filter_synthesis = constraint.get("filter_synthesis") or constraint.get("filter-synthesis", "") + constraint_output["filter_year_by_year"] = _filter_year_by_year + constraint_output["filter_synthesis"] = _filter_synthesis if version >= 870: constraint_output["group"] = constraint.get("group", DEFAULT_GROUP) + + # Choose the right model according to the version + adapted_constraint: ConstraintOutput + if version >= 870: adapted_constraint = ConstraintOutput870(**constraint_output) + elif version >= 830: + adapted_constraint = ConstraintOutput830(**constraint_output) else: adapted_constraint = ConstraintOutputBase(**constraint_output) @@ -675,38 +681,28 @@ def create_binding_constraint( check_attributes_coherence(data, version) - new_constraint = { - "name": data.name, - "enabled": data.enabled, - "time_step": data.time_step, - "operator": data.operator, - "coeffs": self.terms_to_coeffs(data.terms), - "values": data.values, - "less_term_matrix": data.less_term_matrix, - "equal_term_matrix": data.equal_term_matrix, - "greater_term_matrix": data.greater_term_matrix, - "filter_year_by_year": data.filter_year_by_year, - "filter_synthesis": data.filter_synthesis, - "comments": data.comments or "", + new_constraint = {"name": data.name, **json.loads(data.json(exclude={"terms", "name"}, exclude_none=True))} + args = { + **new_constraint, + "command_context": self.storage_service.variant_study_service.command_factory.command_context, } + if data.terms: + args["coeffs"] = self.terms_to_coeffs(data.terms) - if version >= 870: - new_constraint["group"] = data.group or DEFAULT_GROUP - - command = CreateBindingConstraint( - **new_constraint, command_context=self.storage_service.variant_study_service.command_factory.command_context - ) + command = CreateBindingConstraint(**args) # Validates the matrices. Needed when the study is a variant because we only append the command to the list if isinstance(study, VariantStudy): - command.validates_and_fills_matrices(specific_matrices=None, version=version, create=True) + time_step = data.time_step or BindingConstraintFrequency.HOURLY + command.validates_and_fills_matrices( + time_step=time_step, specific_matrices=None, version=version, create=True + ) file_study = self.storage_service.get_storage(study).get_raw(study) execute_or_add_commands(study, file_study, [command], self.storage_service) # Processes the constraints to add them inside the endpoint response. new_constraint["id"] = bc_id - new_constraint["type"] = data.time_step return self.constraint_model_adapter(new_constraint, version) def update_binding_constraint( @@ -720,33 +716,16 @@ def update_binding_constraint( study_version = int(study.version) check_attributes_coherence(data, study_version) - # Because the update_binding_constraint command requires every attribute we have to fill them all. - # This creates a `big` command even though we only updated one field. 
- # fixme : Change the architecture to avoid this type of misconception upd_constraint = { "id": binding_constraint_id, - "enabled": data.enabled if data.enabled is not None else existing_constraint.enabled, - "time_step": data.time_step or existing_constraint.time_step, - "operator": data.operator or existing_constraint.operator, - "coeffs": self.terms_to_coeffs(data.terms) or self.terms_to_coeffs(existing_constraint.terms), - "comments": data.comments or existing_constraint.comments, + **json.loads(data.json(exclude={"terms", "name"}, exclude_none=True)), } - - if study_version >= 840: - upd_constraint["filter_year_by_year"] = data.filter_year_by_year or existing_constraint.filter_year_by_year - upd_constraint["filter_synthesis"] = data.filter_synthesis or existing_constraint.filter_synthesis - - if study_version >= 870: - upd_constraint["group"] = data.group or existing_constraint.group # type: ignore - args = { **upd_constraint, "command_context": self.storage_service.variant_study_service.command_factory.command_context, } - - for term in ["values", "less_term_matrix", "equal_term_matrix", "greater_term_matrix"]: - if matrices_to_update := getattr(data, term): - args[term] = matrices_to_update + if data.terms: + args["coeffs"] = self.terms_to_coeffs(data.terms) if data.time_step is not None and data.time_step != existing_constraint.time_step: # The user changed the time step, we need to update the matrix accordingly @@ -756,22 +735,26 @@ def update_binding_constraint( # Validates the matrices. Needed when the study is a variant because we only append the command to the list if isinstance(study, VariantStudy): - updated_matrices = [ - term for term in ["less_term_matrix", "equal_term_matrix", "greater_term_matrix"] if getattr(data, term) - ] + updated_matrices = [term for term in TERM_MATRICES if getattr(data, term)] + time_step = data.time_step or existing_constraint.time_step command.validates_and_fills_matrices( - specific_matrices=updated_matrices, version=study_version, create=False + time_step=time_step, specific_matrices=updated_matrices, version=study_version, create=False ) execute_or_add_commands(study, file_study, [command], self.storage_service) - # Processes the constraints to add them inside the endpoint response. + # Constructs the endpoint response. 
upd_constraint["name"] = existing_constraint.name - upd_constraint["type"] = upd_constraint["time_step"] - # Replace coeffs by the terms - del upd_constraint["coeffs"] + upd_constraint["type"] = upd_constraint.get("time_step", existing_constraint.time_step) upd_constraint["terms"] = data.terms or existing_constraint.terms - + new_fields = ["enabled", "operator", "comments", "terms"] + if study_version >= 830: + new_fields.extend(["filter_year_by_year", "filter_synthesis"]) + if study_version >= 870: + new_fields.append("group") + for field in new_fields: + if field not in upd_constraint: + upd_constraint[field] = getattr(data, field) or getattr(existing_constraint, field) return self.constraint_model_adapter(upd_constraint, study_version) def remove_binding_constraint(self, study: Study, binding_constraint_id: str) -> None: @@ -829,13 +812,7 @@ def update_constraint_term( command = UpdateBindingConstraint( id=constraint.id, - enabled=constraint.enabled, - time_step=constraint.time_step, - operator=constraint.operator, coeffs=coeffs, - filter_year_by_year=constraint.filter_year_by_year, - filter_synthesis=constraint.filter_synthesis, - comments=constraint.comments, command_context=self.storage_service.variant_study_service.command_factory.command_context, ) execute_or_add_commands(study, file_study, [command], self.storage_service) @@ -866,22 +843,10 @@ def create_constraint_term( ) ) - coeffs = {} - - for term in constraint_terms: - coeffs[term.id] = [term.weight] - if term.offset: - coeffs[term.id].append(term.offset) - + coeffs = {term.id: [term.weight] + [term.offset] if term.offset else [term.weight] for term in constraint_terms} command = UpdateBindingConstraint( id=constraint.id, - enabled=constraint.enabled, - time_step=constraint.time_step, - operator=constraint.operator, coeffs=coeffs, - comments=constraint.comments, - filter_year_by_year=constraint.filter_year_by_year, - filter_synthesis=constraint.filter_synthesis, command_context=self.storage_service.variant_study_service.command_factory.command_context, ) execute_or_add_commands(study, file_study, [command], self.storage_service) @@ -913,7 +878,7 @@ def _replace_matrices_according_to_frequency_and_version( BindingConstraintFrequency.DAILY.value: default_bc_weekly_daily_87, BindingConstraintFrequency.WEEKLY.value: default_bc_weekly_daily_87, }[data.time_step].tolist() - for term in ["less_term_matrix", "equal_term_matrix", "greater_term_matrix"]: + for term in TERM_MATRICES: if term not in args: args[term] = matrix return args diff --git a/antarest/study/business/thematic_trimming_field_infos.py b/antarest/study/business/thematic_trimming_field_infos.py index 764c2c9590..3baabd8014 100644 --- a/antarest/study/business/thematic_trimming_field_infos.py +++ b/antarest/study/business/thematic_trimming_field_infos.py @@ -4,7 +4,8 @@ import typing as t -from antarest.study.business.utils import AllOptionalMetaclass, FormFieldsBaseModel +from antarest.study.business.all_optional_meta import AllOptionalMetaclass +from antarest.study.business.utils import FormFieldsBaseModel class ThematicTrimmingFormFields(FormFieldsBaseModel, metaclass=AllOptionalMetaclass, use_none=True): diff --git a/antarest/study/business/utils.py b/antarest/study/business/utils.py index 53596bf797..8c4b567b22 100644 --- a/antarest/study/business/utils.py +++ b/antarest/study/business/utils.py @@ -1,7 +1,5 @@ import typing as t -import pydantic.fields -import pydantic.main from pydantic import BaseModel from antarest.core.exceptions import CommandApplicationError 
@@ -82,90 +80,3 @@ class FieldInfo(t.TypedDict, total=False): encode: t.Optional[t.Callable[[t.Any], t.Any]] # (encoded_value, current_value) -> decoded_value decode: t.Optional[t.Callable[[t.Any, t.Optional[t.Any]], t.Any]] - - -class AllOptionalMetaclass(pydantic.main.ModelMetaclass): - """ - Metaclass that makes all fields of a Pydantic model optional. - - Usage: - class MyModel(BaseModel, metaclass=AllOptionalMetaclass): - field1: str - field2: int - ... - - Instances of the model can be created even if not all fields are provided during initialization. - Default values, when provided, are used unless `use_none` is set to `True`. - """ - - def __new__( - cls: t.Type["AllOptionalMetaclass"], - name: str, - bases: t.Tuple[t.Type[t.Any], ...], - namespaces: t.Dict[str, t.Any], - use_none: bool = False, - **kwargs: t.Dict[str, t.Any], - ) -> t.Any: - """ - Create a new instance of the metaclass. - - Args: - name: Name of the class to create. - bases: Base classes of the class to create (a Pydantic model). - namespaces: namespace of the class to create that defines the fields of the model. - use_none: If `True`, the default value of the fields is set to `None`. - Note that this field is not part of the Pydantic model, but it is an extension. - **kwargs: Additional keyword arguments used by the metaclass. - """ - # Modify the annotations of the class (but not of the ancestor classes) - # in order to make all fields optional. - # If the current model inherits from another model, the annotations of the ancestor models - # are not modified, because the fields are already converted to `ModelField`. - annotations = namespaces.get("__annotations__", {}) - for field_name, field_type in annotations.items(): - if not field_name.startswith("__"): - # Making already optional fields optional is not a problem (nothing is changed). - annotations[field_name] = t.Optional[field_type] - namespaces["__annotations__"] = annotations - - if use_none: - # Modify the namespace fields to set their default value to `None`. - for field_name, field_info in namespaces.items(): - if isinstance(field_info, pydantic.fields.FieldInfo): - field_info.default = None - field_info.default_factory = None - - # Create the class: all annotations are converted into `ModelField`. - instance = super().__new__(cls, name, bases, namespaces, **kwargs) - - # Modify the inherited fields of the class to make them optional - # and set their default value to `None`. - model_field: pydantic.fields.ModelField - for field_name, model_field in instance.__fields__.items(): - model_field.required = False - model_field.allow_none = True - if use_none: - model_field.default = None - model_field.default_factory = None - model_field.field_info.default = None - - return instance - - -MODEL = t.TypeVar("MODEL", bound=t.Type[BaseModel]) - - -def camel_case_model(model: MODEL) -> MODEL: - """ - This decorator can be used to modify a model to use camel case aliases. - - Args: - model: The pydantic model to modify. - - Returns: - The modified model. 
- """ - model.__config__.alias_generator = to_camel_case - for field_name, field in model.__fields__.items(): - field.alias = to_camel_case(field_name) - return model diff --git a/antarest/study/business/xpansion_management.py b/antarest/study/business/xpansion_management.py index 1bb80cfbaf..22c612af9a 100644 --- a/antarest/study/business/xpansion_management.py +++ b/antarest/study/business/xpansion_management.py @@ -11,8 +11,8 @@ from antarest.core.exceptions import BadZipBinary from antarest.core.model import JSON +from antarest.study.business.all_optional_meta import AllOptionalMetaclass from antarest.study.business.enum_ignore_case import EnumIgnoreCase -from antarest.study.business.utils import AllOptionalMetaclass from antarest.study.model import Study from antarest.study.storage.rawstudy.model.filesystem.bucket_node import BucketNode from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy diff --git a/antarest/study/storage/variantstudy/business/command_extractor.py b/antarest/study/storage/variantstudy/business/command_extractor.py index 9aa5a9b397..4ac5070a69 100644 --- a/antarest/study/storage/variantstudy/business/command_extractor.py +++ b/antarest/study/storage/variantstudy/business/command_extractor.py @@ -1,6 +1,6 @@ import base64 import logging -from typing import List, Optional, Tuple, cast +import typing as t import numpy as np @@ -9,13 +9,11 @@ from antarest.matrixstore.model import MatrixData from antarest.matrixstore.service import ISimpleMatrixService from antarest.study.storage.patch_service import PatchService -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency from antarest.study.storage.rawstudy.model.filesystem.config.files import get_playlist from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.rawstudy.model.filesystem.root.filestudytree import FileStudyTree from antarest.study.storage.variantstudy.business.matrix_constants_generator import GeneratorMatrixConstants from antarest.study.storage.variantstudy.business.utils import strip_matrix_protocol -from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator from antarest.study.storage.variantstudy.model.command.create_area import CreateArea from antarest.study.storage.variantstudy.model.command.create_binding_constraint import CreateBindingConstraint from antarest.study.storage.variantstudy.model.command.create_cluster import CreateCluster @@ -40,7 +38,7 @@ def _find_binding_config(binding_id: str, study_tree: FileStudyTree) -> JSON: url = ["input", "bindingconstraints", "bindingconstraints"] for binding_config in study_tree.get(url).values(): if binding_config["id"] == binding_id: - return cast(JSON, binding_config) + return t.cast(JSON, binding_config) raise ValueError(f"Binding constraint '{binding_id}' not found in '{''.join(url)}'") @@ -56,7 +54,7 @@ def __init__(self, matrix_service: ISimpleMatrixService, patch_service: PatchSer patch_service=self.patch_service, ) - def extract_area(self, study: FileStudy, area_id: str) -> Tuple[List[ICommand], List[ICommand]]: + def extract_area(self, study: FileStudy, area_id: str) -> t.Tuple[t.List[ICommand], t.List[ICommand]]: stopwatch = StopWatch() study_tree = study.tree study_config = study.config @@ -64,7 +62,7 @@ def extract_area(self, study: FileStudy, area_id: str) -> Tuple[List[ICommand], optimization_data = study_tree.get(["input", "areas", area_id, "optimization"]) ui_data = 
study_tree.get(["input", "areas", area_id, "ui"]) - study_commands: List[ICommand] = [ + study_commands: t.List[ICommand] = [ CreateArea( area_name=area.name, command_context=self.command_context, @@ -83,7 +81,7 @@ def extract_area(self, study: FileStudy, area_id: str) -> Tuple[List[ICommand], stopwatch.log_elapsed(lambda x: logger.info(f"Area command extraction done in {x}s")) links_data = study_tree.get(["input", "links", area_id, "properties"]) - links_commands: List[ICommand] = [] + links_commands: t.List[ICommand] = [] for link in area.links: links_commands += self.extract_link(study, area_id, link, links_data) @@ -142,8 +140,8 @@ def extract_link( study: FileStudy, area1: str, area2: str, - links_data: Optional[JSON] = None, - ) -> List[ICommand]: + links_data: t.Optional[JSON] = None, + ) -> t.List[ICommand]: study_tree = study.tree link_command = CreateLink( area1=area1, @@ -162,7 +160,7 @@ def extract_link( command_context=self.command_context, ) null_matrix_id = strip_matrix_protocol(self.generator_matrix_constants.get_null_matrix()) - commands: List[ICommand] = [link_command, link_config_command] + commands: t.List[ICommand] = [link_command, link_config_command] if study.config.version < 820: commands.append( self.generate_replace_matrix( @@ -193,7 +191,7 @@ def extract_link( ) return commands - def _extract_cluster(self, study: FileStudy, area_id: str, cluster_id: str, renewables: bool) -> List[ICommand]: + def _extract_cluster(self, study: FileStudy, area_id: str, cluster_id: str, renewables: bool) -> t.List[ICommand]: study_tree = study.tree if renewables: cluster_type = "renewables" # with a final "s" @@ -209,7 +207,7 @@ def _extract_cluster(self, study: FileStudy, area_id: str, cluster_id: str, rene null_matrix_id = strip_matrix_protocol(self.generator_matrix_constants.get_null_matrix()) # Note that cluster IDs are case-insensitive, but series IDs are in lower case. 
series_id = cluster_id.lower() - study_commands: List[ICommand] = [ + study_commands: t.List[ICommand] = [ create_cluster_command( area_id=area_id, cluster_name=cluster.id, @@ -239,13 +237,13 @@ def _extract_cluster(self, study: FileStudy, area_id: str, cluster_id: str, rene ) return study_commands - def extract_cluster(self, study: FileStudy, area_id: str, thermal_id: str) -> List[ICommand]: + def extract_cluster(self, study: FileStudy, area_id: str, thermal_id: str) -> t.List[ICommand]: return self._extract_cluster(study, area_id, thermal_id, False) - def extract_renewables_cluster(self, study: FileStudy, area_id: str, renewables_id: str) -> List[ICommand]: + def extract_renewables_cluster(self, study: FileStudy, area_id: str, renewables_id: str) -> t.List[ICommand]: return self._extract_cluster(study, area_id, renewables_id, True) - def extract_hydro(self, study: FileStudy, area_id: str) -> List[ICommand]: + def extract_hydro(self, study: FileStudy, area_id: str) -> t.List[ICommand]: study_tree = study.tree commands = [ self.generate_replace_matrix( @@ -306,8 +304,8 @@ def extract_hydro(self, study: FileStudy, area_id: str) -> List[ICommand]: return commands - def extract_district(self, study: FileStudy, district_id: str) -> List[ICommand]: - study_commands: List[ICommand] = [] + def extract_district(self, study: FileStudy, district_id: str) -> t.List[ICommand]: + study_commands: t.List[ICommand] = [] study_config = study.config study_tree = study.tree district_config = study_config.sets[district_id] @@ -325,64 +323,66 @@ def extract_district(self, study: FileStudy, district_id: str) -> List[ICommand] ) return study_commands - def extract_comments(self, study: FileStudy) -> List[ICommand]: + def extract_comments(self, study: FileStudy) -> t.List[ICommand]: study_tree = study.tree - content = cast(bytes, study_tree.get(["settings", "comments"])) + content = t.cast(bytes, study_tree.get(["settings", "comments"])) comments = content.decode("utf-8") - return [ - UpdateComments( - comments=comments, - command_context=self.command_context, - ) - ] + return [UpdateComments(comments=comments, command_context=self.command_context)] def extract_binding_constraint( self, study: FileStudy, binding_id: str, - bindings_data: Optional[JSON] = None, - ) -> List[ICommand]: + bindings_data: t.Optional[JSON] = None, + ) -> t.List[ICommand]: study_tree = study.tree + + # Retrieve binding constraint properties from the study tree, + # so, field names follow the same convention as the INI file. binding: JSON = _find_binding_config(binding_id, study_tree) if bindings_data is None else bindings_data - binding_constraint_command = CreateBindingConstraint( - name=binding["name"], - enabled=binding["enabled"], - time_step=BindingConstraintFrequency(binding["type"]), - operator=BindingConstraintOperator(binding["operator"]), - coeffs={ - coeff: [float(el) for el in str(value).split("%")] - for coeff, value in binding.items() - if "%" in coeff or "." 
in coeff - }, - comments=binding.get("comments", None), - command_context=self.command_context, - ) - study_commands: List[ICommand] = [ - binding_constraint_command, - self.generate_replace_matrix( - study_tree, - ["input", "bindingconstraints", binding["id"]], - ), - ] - return study_commands - def generate_update_config( - self, - study_tree: FileStudyTree, - url: List[str], - ) -> ICommand: + # Extract the binding constraint ID, which is recalculated from the name in the command + bc_id = binding.pop("id") + + # Extract binding constraint terms, which keys contain "%" or "." + terms = {} + for term_id, value in sorted(binding.items()): + if "%" in term_id or "." in term_id: + weight, _, offset = str(value).partition("%") + term_value = [float(weight), int(offset)] if offset else [float(weight)] + terms[term_id] = term_value + del binding[term_id] + + # Extract the matrices associated with the binding constraint + if study.config.version < 870: + urls = {"values": ["input", "bindingconstraints", bc_id]} + else: + urls = { + "less_term_matrix": ["input", "bindingconstraints", f"{bc_id}_lt"], + "greater_term_matrix": ["input", "bindingconstraints", f"{bc_id}_gt"], + "equal_term_matrix": ["input", "bindingconstraints", f"{bc_id}_eq"], + } + + matrices: t.Dict[str, t.List[t.List[float]]] = {} + for name, url in urls.items(): + matrix = study_tree.get(url) + if matrix is not None: + matrices[name] = matrix["data"] + + # Create the command to create the binding constraint + create_cmd = CreateBindingConstraint(**binding, **matrices, coeffs=terms, command_context=self.command_context) + + return [create_cmd] + + def generate_update_config(self, study_tree: FileStudyTree, url: t.List[str]) -> ICommand: data = study_tree.get(url) - return UpdateConfig( - target="/".join(url), - data=data, - command_context=self.command_context, - ) + return UpdateConfig(target="/".join(url), data=data, command_context=self.command_context) - def generate_update_rawfile(self, study_tree: FileStudyTree, url: List[str]) -> ICommand: + def generate_update_raw_file(self, study_tree: FileStudyTree, url: t.List[str]) -> ICommand: data = study_tree.get(url) return UpdateRawFile( target="/".join(url), - b64Data=base64.b64encode(cast(bytes, data)).decode("utf-8"), + b64Data=base64.b64encode(t.cast(bytes, data)).decode("utf-8"), command_context=self.command_context, ) @@ -390,7 +390,7 @@ def generate_update_comments( self, study_tree: FileStudyTree, ) -> ICommand: - content = cast(bytes, study_tree.get(["settings", "comments"])) + content = t.cast(bytes, study_tree.get(["settings", "comments"])) comments = content.decode("utf-8") return UpdateComments( comments=comments, @@ -414,8 +414,8 @@ def generate_update_playlist( def generate_replace_matrix( self, study_tree: FileStudyTree, - url: List[str], - default_value: Optional[str] = None, + url: t.List[str], + default_value: t.Optional[str] = None, ) -> ICommand: data = study_tree.get(url) if isinstance(data, str): @@ -425,7 +425,7 @@ def generate_replace_matrix( else: matrix = [[]] if default_value is None else default_value if isinstance(matrix, np.ndarray): - matrix = cast(List[List[MatrixData]], matrix.tolist()) + matrix = t.cast(t.List[t.List[MatrixData]], matrix.tolist()) return ReplaceMatrix( target="/".join(url), matrix=matrix, diff --git a/antarest/study/storage/variantstudy/business/command_reverter.py b/antarest/study/storage/variantstudy/business/command_reverter.py index fa4de66c06..089589576f 100644 --- 
a/antarest/study/storage/variantstudy/business/command_reverter.py +++ b/antarest/study/storage/variantstudy/business/command_reverter.py @@ -1,13 +1,16 @@ import logging +import typing as t from pathlib import Path -from typing import Callable, Dict, List from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.variantstudy.model.command.common import CommandName from antarest.study.storage.variantstudy.model.command.create_area import CreateArea -from antarest.study.storage.variantstudy.model.command.create_binding_constraint import CreateBindingConstraint +from antarest.study.storage.variantstudy.model.command.create_binding_constraint import ( + TERM_MATRICES, + CreateBindingConstraint, +) from antarest.study.storage.variantstudy.model.command.create_cluster import CreateCluster from antarest.study.storage.variantstudy.model.command.create_district import CreateDistrict from antarest.study.storage.variantstudy.model.command.create_link import CreateLink @@ -35,39 +38,39 @@ class CommandReverter: def __init__(self) -> None: - self.method_dict: Dict[ + self.method_dict: t.Dict[ CommandName, - Callable[[ICommand, List[ICommand], FileStudy], List[ICommand]], + t.Callable[[ICommand, t.List[ICommand], FileStudy], t.List[ICommand]], ] = {command_name: getattr(self, f"_revert_{command_name.value}") for command_name in CommandName} @staticmethod - def _revert_create_area(base_command: CreateArea, history: List["ICommand"], base: FileStudy) -> List[ICommand]: + def _revert_create_area(base_command: CreateArea, history: t.List["ICommand"], base: FileStudy) -> t.List[ICommand]: area_id = transform_name_to_id(base_command.area_name) return [RemoveArea(id=area_id, command_context=base_command.command_context)] @staticmethod - def _revert_remove_area(base_command: RemoveArea, history: List["ICommand"], base: FileStudy) -> List[ICommand]: + def _revert_remove_area(base_command: RemoveArea, history: t.List["ICommand"], base: FileStudy) -> t.List[ICommand]: raise NotImplementedError("The revert function for RemoveArea is not available") @staticmethod def _revert_create_district( base_command: CreateDistrict, - history: List["ICommand"], + history: t.List["ICommand"], base: FileStudy, - ) -> List[ICommand]: + ) -> t.List[ICommand]: district_id = transform_name_to_id(base_command.name) return [RemoveDistrict(id=district_id, command_context=base_command.command_context)] @staticmethod def _revert_remove_district( base_command: RemoveDistrict, - history: List["ICommand"], + history: t.List["ICommand"], base: FileStudy, - ) -> List[ICommand]: + ) -> t.List[ICommand]: raise NotImplementedError("The revert function for RemoveDistrict is not available") @staticmethod - def _revert_create_link(base_command: CreateLink, history: List["ICommand"], base: FileStudy) -> List[ICommand]: + def _revert_create_link(base_command: CreateLink, history: t.List["ICommand"], base: FileStudy) -> t.List[ICommand]: return [ RemoveLink( area1=base_command.area1, @@ -77,24 +80,24 @@ def _revert_create_link(base_command: CreateLink, history: List["ICommand"], bas ] @staticmethod - def _revert_remove_link(base_command: RemoveLink, history: List["ICommand"], base: FileStudy) -> List[ICommand]: + def _revert_remove_link(base_command: RemoveLink, history: t.List["ICommand"], base: FileStudy) -> 
t.List[ICommand]: raise NotImplementedError("The revert function for RemoveLink is not available") @staticmethod def _revert_create_binding_constraint( base_command: CreateBindingConstraint, - history: List["ICommand"], + history: t.List["ICommand"], base: FileStudy, - ) -> List[ICommand]: + ) -> t.List[ICommand]: bind_id = transform_name_to_id(base_command.name) return [RemoveBindingConstraint(id=bind_id, command_context=base_command.command_context)] @staticmethod def _revert_update_binding_constraint( base_command: UpdateBindingConstraint, - history: List["ICommand"], + history: t.List["ICommand"], base: FileStudy, - ) -> List[ICommand]: + ) -> t.List[ICommand]: for command in reversed(history): if isinstance(command, UpdateBindingConstraint) and command.id == base_command.id: return [command] @@ -112,8 +115,8 @@ def _revert_update_binding_constraint( } matrix_service = command.command_context.matrix_service - for matrix_name in ["values", "less_term_matrix", "equal_term_matrix", "greater_term_matrix"]: - matrix = command.__getattribute__(matrix_name) + for matrix_name in ["values"] + TERM_MATRICES: + matrix = getattr(command, matrix_name) if matrix is not None: args[matrix_name] = matrix_service.get_matrix_id(matrix) @@ -124,24 +127,24 @@ def _revert_update_binding_constraint( @staticmethod def _revert_remove_binding_constraint( base_command: RemoveBindingConstraint, - history: List["ICommand"], + history: t.List["ICommand"], base: FileStudy, - ) -> List[ICommand]: + ) -> t.List[ICommand]: raise NotImplementedError("The revert function for RemoveBindingConstraint is not available") @staticmethod def _revert_update_scenario_builder( base_command: UpdateScenarioBuilder, - history: List["ICommand"], + history: t.List["ICommand"], base: FileStudy, - ) -> List[ICommand]: + ) -> t.List[ICommand]: # todo make the diff between base study scenariobuilder data and base_command raise NotImplementedError("The revert function for UpdateScenarioBuilder is not available") @staticmethod def _revert_create_cluster( - base_command: CreateCluster, history: List["ICommand"], base: FileStudy - ) -> List[ICommand]: + base_command: CreateCluster, history: t.List["ICommand"], base: FileStudy + ) -> t.List[ICommand]: cluster_id = transform_name_to_id(base_command.cluster_name) return [ RemoveCluster( @@ -153,16 +156,16 @@ def _revert_create_cluster( @staticmethod def _revert_remove_cluster( - base_command: RemoveCluster, history: List["ICommand"], base: FileStudy - ) -> List[ICommand]: + base_command: RemoveCluster, history: t.List["ICommand"], base: FileStudy + ) -> t.List[ICommand]: raise NotImplementedError("The revert function for RemoveCluster is not available") @staticmethod def _revert_create_renewables_cluster( base_command: CreateRenewablesCluster, - history: List["ICommand"], + history: t.List["ICommand"], base: FileStudy, - ) -> List[ICommand]: + ) -> t.List[ICommand]: cluster_id = transform_name_to_id(base_command.cluster_name) return [ RemoveRenewablesCluster( @@ -175,17 +178,17 @@ def _revert_create_renewables_cluster( @staticmethod def _revert_remove_renewables_cluster( base_command: RemoveRenewablesCluster, - history: List["ICommand"], + history: t.List["ICommand"], base: FileStudy, - ) -> List[ICommand]: + ) -> t.List[ICommand]: raise NotImplementedError("The revert function for RemoveRenewablesCluster is not available") @staticmethod def _revert_create_st_storage( base_command: CreateSTStorage, - history: List["ICommand"], + history: t.List["ICommand"], base: FileStudy, - ) -> List[ICommand]: 
+ ) -> t.List[ICommand]: storage_id = base_command.parameters.id return [ RemoveSTStorage( @@ -198,15 +201,15 @@ def _revert_create_st_storage( @staticmethod def _revert_remove_st_storage( base_command: RemoveSTStorage, - history: List["ICommand"], + history: t.List["ICommand"], base: FileStudy, - ) -> List[ICommand]: + ) -> t.List[ICommand]: raise NotImplementedError("The revert function for RemoveSTStorage is not available") @staticmethod def _revert_replace_matrix( - base_command: ReplaceMatrix, history: List["ICommand"], base: FileStudy - ) -> List[ICommand]: + base_command: ReplaceMatrix, history: t.List["ICommand"], base: FileStudy + ) -> t.List[ICommand]: for command in reversed(history): if isinstance(command, ReplaceMatrix) and command.target == base_command.target: return [command] @@ -219,8 +222,10 @@ def _revert_replace_matrix( return [] # if the matrix does not exist, there is nothing to revert @staticmethod - def _revert_update_config(base_command: UpdateConfig, history: List["ICommand"], base: FileStudy) -> List[ICommand]: - update_config_list: List[UpdateConfig] = [] + def _revert_update_config( + base_command: UpdateConfig, history: t.List["ICommand"], base: FileStudy + ) -> t.List[ICommand]: + update_config_list: t.List[UpdateConfig] = [] self_target_path = Path(base_command.target) parent_path: Path = Path("../model/command") for command in reversed(history): @@ -234,7 +239,7 @@ def _revert_update_config(base_command: UpdateConfig, history: List["ICommand"], parent_path = Path(command.target) break - output_list: List[ICommand] = [ + output_list: t.List[ICommand] = [ command for command in update_config_list[::-1] if parent_path in Path(command.target).parents or str(parent_path) == command.target @@ -257,9 +262,9 @@ def _revert_update_config(base_command: UpdateConfig, history: List["ICommand"], @staticmethod def _revert_update_comments( base_command: UpdateComments, - history: List["ICommand"], + history: t.List["ICommand"], base: FileStudy, - ) -> List[ICommand]: + ) -> t.List[ICommand]: for command in reversed(history): if isinstance(command, UpdateComments): return [command] @@ -272,9 +277,9 @@ def _revert_update_comments( @staticmethod def _revert_update_playlist( base_command: UpdatePlaylist, - history: List["ICommand"], + history: t.List["ICommand"], base: FileStudy, - ) -> List[ICommand]: + ) -> t.List[ICommand]: for command in reversed(history): if isinstance(command, UpdatePlaylist): return [command] @@ -285,33 +290,39 @@ def _revert_update_playlist( return [] # if the file does not exist, there is nothing to revert @staticmethod - def _revert_update_file(base_command: UpdateRawFile, history: List["ICommand"], base: FileStudy) -> List[ICommand]: + def _revert_update_file( + base_command: UpdateRawFile, history: t.List["ICommand"], base: FileStudy + ) -> t.List[ICommand]: for command in reversed(history): if isinstance(command, UpdateRawFile) and command.target == base_command.target: return [command] - return [base_command.get_command_extractor().generate_update_rawfile(base.tree, base_command.target.split("/"))] + extractor = base_command.get_command_extractor() + return [extractor.generate_update_raw_file(base.tree, base_command.target.split("/"))] @staticmethod def _revert_update_district( base_command: UpdateDistrict, - history: List["ICommand"], + history: t.List["ICommand"], base: FileStudy, - ) -> List[ICommand]: + ) -> t.List[ICommand]: for command in reversed(history): - if isinstance(command, UpdateDistrict) and command.id == base_command.id: - 
return [command] - elif isinstance(command, CreateDistrict) and transform_name_to_id(command.name) == base_command.id: + # fmt: off + if ( + (isinstance(command, UpdateDistrict) and command.id == base_command.id) or + (isinstance(command, CreateDistrict) and transform_name_to_id(command.name) == base_command.id) + ): return [command] - return [base_command.get_command_extractor().generate_update_district(base, base_command.id)] + extractor = base_command.get_command_extractor() + return [extractor.generate_update_district(base, base_command.id)] def revert( self, base_command: ICommand, - history: List["ICommand"], + history: t.List["ICommand"], base: FileStudy, - ) -> List[ICommand]: + ) -> t.List[ICommand]: """ Generate a list of commands to revert the given command. diff --git a/antarest/study/storage/variantstudy/business/utils_binding_constraint.py b/antarest/study/storage/variantstudy/business/utils_binding_constraint.py index 37f08b0323..4db7c525d8 100644 --- a/antarest/study/storage/variantstudy/business/utils_binding_constraint.py +++ b/antarest/study/storage/variantstudy/business/utils_binding_constraint.py @@ -1,95 +1,10 @@ import typing as t -from antarest.core.model import JSON -from antarest.matrixstore.model import MatrixData from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( BindingConstraintDTO, BindingConstraintFrequency, ) from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig -from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator - - -def apply_binding_constraint( - study_data: FileStudy, - binding_constraints: JSON, - new_key: str, - bd_id: str, - name: str, - comments: t.Optional[str], - enabled: bool, - freq: BindingConstraintFrequency, - operator: BindingConstraintOperator, - coeffs: t.Dict[str, t.List[float]], - values: t.Union[t.List[t.List[MatrixData]], str, None], - less_term_matrix: t.Union[t.List[t.List[MatrixData]], str, None], - greater_term_matrix: t.Union[t.List[t.List[MatrixData]], str, None], - equal_term_matrix: t.Union[t.List[t.List[MatrixData]], str, None], - filter_year_by_year: t.Optional[str] = None, - filter_synthesis: t.Optional[str] = None, - group: t.Optional[str] = None, -) -> str: - version = study_data.config.version - binding_constraints[new_key] = { - "name": name, - "id": bd_id, - "enabled": enabled, - "type": freq.value, - "operator": operator.value, - } - if group: - binding_constraints[new_key]["group"] = group - if version >= 830: - if filter_year_by_year: - binding_constraints[new_key]["filter-year-by-year"] = filter_year_by_year - if filter_synthesis: - binding_constraints[new_key]["filter-synthesis"] = filter_synthesis - if comments is not None: - binding_constraints[new_key]["comments"] = comments - - for link_or_cluster in coeffs: - if "%" in link_or_cluster: - area_1, area_2 = link_or_cluster.split("%") - if area_1 not in study_data.config.areas or area_2 not in study_data.config.areas[area_1].links: - return f"Link '{link_or_cluster}' does not exist in binding constraint '{bd_id}'" - elif "." in link_or_cluster: - # Cluster IDs are stored in lower case in the binding constraints file. 
- area, cluster_id = link_or_cluster.split(".") - thermal_ids = {thermal.id.lower() for thermal in study_data.config.areas[area].thermals} - if area not in study_data.config.areas or cluster_id.lower() not in thermal_ids: - return f"Cluster '{link_or_cluster}' does not exist in binding constraint '{bd_id}'" - else: - raise NotImplementedError(f"Invalid link or thermal ID: {link_or_cluster}") - - # this is weird because Antares Simulator only accept int as offset - if len(coeffs[link_or_cluster]) == 2: - coeffs[link_or_cluster][1] = int(coeffs[link_or_cluster][1]) - - binding_constraints[new_key][link_or_cluster] = "%".join( - [str(coeff_val) for coeff_val in coeffs[link_or_cluster]] - ) - parse_bindings_coeffs_and_save_into_config(bd_id, study_data.config, coeffs) - study_data.tree.save( - binding_constraints, - ["input", "bindingconstraints", "bindingconstraints"], - ) - if values: - if not isinstance(values, str): # pragma: no cover - raise TypeError(repr(values)) - if version < 870: - study_data.tree.save(values, ["input", "bindingconstraints", bd_id]) - for matrix_term, matrix_name, matrix_alias in zip( - [less_term_matrix, greater_term_matrix, equal_term_matrix], - ["less_term_matrix", "greater_term_matrix", "equal_term_matrix"], - ["lt", "gt", "eq"], - ): - if matrix_term: - if not isinstance(matrix_term, str): # pragma: no cover - raise TypeError(repr(matrix_term)) - if version >= 870: - study_data.tree.save(matrix_term, ["input", "bindingconstraints", f"{bd_id}_{matrix_alias}"]) - return "" # success def parse_bindings_coeffs_and_save_into_config( diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py index 4bdfc714c4..70a1c1f627 100644 --- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py @@ -1,3 +1,4 @@ +import json import typing as t from abc import ABCMeta @@ -5,13 +6,13 @@ from pydantic import BaseModel, Extra, Field, root_validator from antarest.matrixstore.model import MatrixData +from antarest.study.business.all_optional_meta import AllOptionalMetaclass from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.business.matrix_constants_generator import GeneratorMatrixConstants from antarest.study.storage.variantstudy.business.utils import validate_matrix from antarest.study.storage.variantstudy.business.utils_binding_constraint import ( - apply_binding_constraint, parse_bindings_coeffs_and_save_into_config, ) from antarest.study.storage.variantstudy.model.command.common import ( @@ -22,6 +23,9 @@ from antarest.study.storage.variantstudy.model.command.icommand import MATCH_SIGNATURE_SEPARATOR, ICommand from antarest.study.storage.variantstudy.model.model import CommandDTO +TERM_MATRICES = ["less_term_matrix", "equal_term_matrix", "greater_term_matrix"] +DEFAULT_GROUP = "default" + MatrixType = t.List[t.List[MatrixData]] EXPECTED_MATRIX_SHAPES = { @@ -63,17 +67,77 @@ def check_matrix_values(time_step: BindingConstraintFrequency, values: MatrixTyp raise ValueError("Matrix values cannot contain NaN") -class BindingConstraintProperties(BaseModel, 
extra=Extra.forbid, allow_population_by_field_name=True): +# ================================================================================= +# Binding constraint properties classes +# ================================================================================= + + +class BindingConstraintPropertiesBase(BaseModel, extra=Extra.forbid, allow_population_by_field_name=True): enabled: bool = True - time_step: BindingConstraintFrequency = BindingConstraintFrequency.HOURLY + time_step: BindingConstraintFrequency = Field(BindingConstraintFrequency.HOURLY, alias="type") operator: BindingConstraintOperator = BindingConstraintOperator.EQUAL - comments: t.Optional[str] = "" - filter_year_by_year: t.Optional[str] = "" - filter_synthesis: t.Optional[str] = "" + comments: str = "" + + @classmethod + def from_dict(cls, **attrs: t.Any) -> "BindingConstraintPropertiesBase": + """ + Instantiate a class from a dictionary excluding unknown or `None` fields. + """ + attrs = {k: v for k, v in attrs.items() if k in cls.__fields__ and v is not None} + return cls(**attrs) + + +class BindingConstraintProperties830(BindingConstraintPropertiesBase): + filter_year_by_year: str = Field("", alias="filter-year-by-year") + filter_synthesis: str = Field("", alias="filter-synthesis") + + +class BindingConstraintProperties870(BindingConstraintProperties830): + group: str = DEFAULT_GROUP + + +BindingConstraintProperties = t.Union[ + BindingConstraintPropertiesBase, + BindingConstraintProperties830, + BindingConstraintProperties870, +] + + +def get_binding_constraint_config_cls(study_version: t.Union[str, int]) -> t.Type[BindingConstraintProperties]: + """ + Retrieves the binding constraint configuration class based on the study version. + """ + version = int(study_version) + if version >= 870: + return BindingConstraintProperties870 + elif version >= 830: + return BindingConstraintProperties830 + else: + return BindingConstraintPropertiesBase + + +def create_binding_constraint_config(study_version: t.Union[str, int], **kwargs: t.Any) -> BindingConstraintProperties: + """ + Factory method to create a binding constraint configuration model. + + Args: + study_version: The version of the study. + **kwargs: The properties to be used to initialize the model. + + Returns: + The binding_constraint configuration model. 
+ """ + cls = get_binding_constraint_config_cls(study_version) + return cls.from_dict(**kwargs) -class BindingConstraintProperties870(BindingConstraintProperties): - group: t.Optional[str] = "" +class OptionalProperties(BindingConstraintProperties870, metaclass=AllOptionalMetaclass, use_none=True): + pass + + +# ================================================================================= +# Binding constraint matrices classes +# ================================================================================= class BindingConstraintMatrices(BaseModel, extra=Extra.forbid, allow_population_by_field_name=True): @@ -114,35 +178,41 @@ def check_matrices( "You cannot fill 'values' (matrix before v8.7) and a matrix term:" " 'less_term_matrix', 'greater_term_matrix' or 'equal_term_matrix' (matrices since v8.7)" ) + return values -class AbstractBindingConstraintCommand( - BindingConstraintProperties870, BindingConstraintMatrices, ICommand, metaclass=ABCMeta -): +# ================================================================================= +# Binding constraint command classes +# ================================================================================= + + +class AbstractBindingConstraintCommand(OptionalProperties, BindingConstraintMatrices, ICommand, metaclass=ABCMeta): """ Abstract class for binding constraint commands. """ - coeffs: t.Dict[str, t.List[float]] + coeffs: t.Optional[t.Dict[str, t.List[float]]] def to_dto(self) -> CommandDTO: - args = { - "enabled": self.enabled, - "time_step": self.time_step.value, - "operator": self.operator.value, - "coeffs": self.coeffs, - "comments": self.comments, - "filter_year_by_year": self.filter_year_by_year, - "filter_synthesis": self.filter_synthesis, - } + json_command = json.loads(self.json(exclude={"command_context"})) + args = {} + for field in ["enabled", "coeffs", "comments", "time_step", "operator"]: + if json_command[field]: + args[field] = json_command[field] + + # The `filter_year_by_year` and `filter_synthesis` attributes are only available for studies since v8.3 + if self.filter_synthesis: + args["filter_synthesis"] = self.filter_synthesis + if self.filter_year_by_year: + args["filter_year_by_year"] = self.filter_year_by_year # The `group` attribute is only available for studies since v8.7 if self.group: args["group"] = self.group matrix_service = self.command_context.matrix_service - for matrix_name in ["values", "less_term_matrix", "greater_term_matrix", "equal_term_matrix"]: + for matrix_name in TERM_MATRICES + ["values"]: matrix_attr = getattr(self, matrix_name, None) if matrix_attr is not None: args[matrix_name] = matrix_service.get_matrix_id(matrix_attr) @@ -163,11 +233,9 @@ def get_inner_matrices(self) -> t.List[str]: ] def get_corresponding_matrices( - self, v: t.Optional[t.Union[MatrixType, str]], version: int, create: bool + self, v: t.Optional[t.Union[MatrixType, str]], time_step: BindingConstraintFrequency, version: int, create: bool ) -> t.Optional[str]: - constants: GeneratorMatrixConstants - constants = self.command_context.generator_matrix_constants - time_step = self.time_step + constants: GeneratorMatrixConstants = self.command_context.generator_matrix_constants if v is None: if not create: @@ -198,17 +266,81 @@ def get_corresponding_matrices( raise TypeError(repr(v)) def validates_and_fills_matrices( - self, *, specific_matrices: t.Optional[t.List[str]], version: int, create: bool + self, + *, + time_step: BindingConstraintFrequency, + specific_matrices: t.Optional[t.List[str]], + version: int, + create: 
bool, ) -> None: if version < 870: - self.values = self.get_corresponding_matrices(self.values, version, create) + self.values = self.get_corresponding_matrices(self.values, time_step, version, create) elif specific_matrices: for matrix in specific_matrices: - setattr(self, matrix, self.get_corresponding_matrices(getattr(self, matrix), version, create)) + setattr( + self, matrix, self.get_corresponding_matrices(getattr(self, matrix), time_step, version, create) + ) else: - self.less_term_matrix = self.get_corresponding_matrices(self.less_term_matrix, version, create) - self.greater_term_matrix = self.get_corresponding_matrices(self.greater_term_matrix, version, create) - self.equal_term_matrix = self.get_corresponding_matrices(self.equal_term_matrix, version, create) + self.less_term_matrix = self.get_corresponding_matrices(self.less_term_matrix, time_step, version, create) + self.greater_term_matrix = self.get_corresponding_matrices( + self.greater_term_matrix, time_step, version, create + ) + self.equal_term_matrix = self.get_corresponding_matrices(self.equal_term_matrix, time_step, version, create) + + def apply_binding_constraint( + self, study_data: FileStudy, binding_constraints: t.Dict[str, t.Any], new_key: str, bd_id: str + ) -> CommandOutput: + version = study_data.config.version + + if self.coeffs: + for link_or_cluster in self.coeffs: + if "%" in link_or_cluster: + area_1, area_2 = link_or_cluster.split("%") + if area_1 not in study_data.config.areas or area_2 not in study_data.config.areas[area_1].links: + return CommandOutput( + status=False, + message=f"Link '{link_or_cluster}' does not exist in binding constraint '{bd_id}'", + ) + elif "." in link_or_cluster: + # Cluster IDs are stored in lower case in the binding constraints file. + area, cluster_id = link_or_cluster.split(".") + thermal_ids = {thermal.id.lower() for thermal in study_data.config.areas[area].thermals} + if area not in study_data.config.areas or cluster_id.lower() not in thermal_ids: + return CommandOutput( + status=False, + message=f"Cluster '{link_or_cluster}' does not exist in binding constraint '{bd_id}'", + ) + else: + raise NotImplementedError(f"Invalid link or thermal ID: {link_or_cluster}") + + # this is weird because Antares Simulator only accept int as offset + if len(self.coeffs[link_or_cluster]) == 2: + self.coeffs[link_or_cluster][1] = int(self.coeffs[link_or_cluster][1]) + + binding_constraints[new_key][link_or_cluster] = "%".join( + [str(coeff_val) for coeff_val in self.coeffs[link_or_cluster]] + ) + parse_bindings_coeffs_and_save_into_config(bd_id, study_data.config, self.coeffs or {}) + study_data.tree.save( + binding_constraints, + ["input", "bindingconstraints", "bindingconstraints"], + ) + if self.values: + if not isinstance(self.values, str): # pragma: no cover + raise TypeError(repr(self.values)) + if version < 870: + study_data.tree.save(self.values, ["input", "bindingconstraints", bd_id]) + for matrix_term, matrix_name, matrix_alias in zip( + [self.less_term_matrix, self.equal_term_matrix, self.greater_term_matrix], + TERM_MATRICES, + ["lt", "eq", "gt"], + ): + if matrix_term: + if not isinstance(matrix_term, str): # pragma: no cover + raise TypeError(repr(matrix_term)) + if version >= 870: + study_data.tree.save(matrix_term, ["input", "bindingconstraints", f"{bd_id}_{matrix_alias}"]) + return CommandOutput(status=True) class CreateBindingConstraint(AbstractBindingConstraintCommand): @@ -224,35 +356,26 @@ class CreateBindingConstraint(AbstractBindingConstraintCommand): def 
_apply_config(self, study_data_config: FileStudyTreeConfig) -> t.Tuple[CommandOutput, t.Dict[str, t.Any]]: bd_id = transform_name_to_id(self.name) - parse_bindings_coeffs_and_save_into_config(bd_id, study_data_config, self.coeffs) + parse_bindings_coeffs_and_save_into_config(bd_id, study_data_config, self.coeffs or {}) return CommandOutput(status=True), {} def _apply(self, study_data: FileStudy) -> CommandOutput: binding_constraints = study_data.tree.get(["input", "bindingconstraints", "bindingconstraints"]) - new_key = len(binding_constraints) + new_key = str(len(binding_constraints)) bd_id = transform_name_to_id(self.name) - self.validates_and_fills_matrices(specific_matrices=None, version=study_data.config.version, create=True) - err_msg = apply_binding_constraint( - study_data, - binding_constraints, - str(new_key), - bd_id, - self.name, - self.comments, - self.enabled, - self.time_step, - self.operator, - self.coeffs, - self.values, - self.less_term_matrix, - self.greater_term_matrix, - self.equal_term_matrix, - self.filter_year_by_year, - self.filter_synthesis, - self.group, + study_version = study_data.config.version + props = create_binding_constraint_config(study_version, **self.dict()) + obj = json.loads(props.json(by_alias=True)) + + new_binding = {"id": bd_id, "name": self.name, **obj} + + binding_constraints[new_key] = new_binding + + self.validates_and_fills_matrices( + time_step=props.time_step, specific_matrices=None, version=study_version, create=True ) - return CommandOutput(status=not err_msg, message=err_msg) + return super().apply_binding_constraint(study_data, binding_constraints, new_key, bd_id) def to_dto(self) -> CommandDTO: dto = super().to_dto() @@ -262,49 +385,32 @@ def to_dto(self) -> CommandDTO: def match_signature(self) -> str: return str(self.command_name.value + MATCH_SIGNATURE_SEPARATOR + self.name) - def match(self, other: ICommand, equal: bool = False) -> bool: - if not isinstance(other, CreateBindingConstraint): - return False - simple_match = self.name == other.name - if not equal: - return simple_match - return ( - simple_match - and self.enabled == other.enabled - and self.time_step == other.time_step - and self.operator == other.operator - and self.coeffs == other.coeffs - and self.values == other.values - and self.comments == other.comments - and self.less_term_matrix == other.less_term_matrix - and self.greater_term_matrix == other.greater_term_matrix - and self.equal_term_matrix == other.equal_term_matrix - and self.group == other.group - and self.filter_synthesis == other.filter_synthesis - and self.filter_year_by_year == other.filter_year_by_year - ) - def _create_diff(self, other: "ICommand") -> t.List["ICommand"]: from antarest.study.storage.variantstudy.model.command.update_binding_constraint import UpdateBindingConstraint other = t.cast(CreateBindingConstraint, other) bd_id = transform_name_to_id(self.name) - - args = { - "id": bd_id, - "enabled": other.enabled, - "time_step": other.time_step, - "operator": other.operator, - "coeffs": other.coeffs, - "filter_year_by_year": other.filter_year_by_year, - "filter_synthesis": other.filter_synthesis, - "comments": other.comments, - "command_context": other.command_context, - "group": other.group, - } + args = {"id": bd_id, "command_context": other.command_context} + + excluded_fields = frozenset(ICommand.__fields__) + self_command = json.loads(self.json(exclude=excluded_fields)) + other_command = json.loads(other.json(exclude=excluded_fields)) + properties = [ + "enabled", + "coeffs", + 
"comments", + "filter_year_by_year", + "filter_synthesis", + "group", + "time_step", + "operator", + ] + for prop in properties: + if self_command[prop] != other_command[prop]: + args[prop] = other_command[prop] matrix_service = self.command_context.matrix_service - for matrix_name in ["values", "less_term_matrix", "equal_term_matrix", "greater_term_matrix"]: + for matrix_name in ["values"] + TERM_MATRICES: self_matrix = getattr(self, matrix_name) # matrix, ID or `None` other_matrix = getattr(other, matrix_name) # matrix, ID or `None` self_matrix_id = None if self_matrix is None else matrix_service.get_matrix_id(self_matrix) @@ -313,3 +419,10 @@ def _create_diff(self, other: "ICommand") -> t.List["ICommand"]: args[matrix_name] = other_matrix_id return [UpdateBindingConstraint(**args)] + + def match(self, other: "ICommand", equal: bool = False) -> bool: + if not isinstance(other, self.__class__): + return False + if not equal: + return self.name == other.name + return super().match(other, equal) diff --git a/antarest/study/storage/variantstudy/model/command/icommand.py b/antarest/study/storage/variantstudy/model/command/icommand.py index 3fd31f58fe..72eb6bfa02 100644 --- a/antarest/study/storage/variantstudy/model/command/icommand.py +++ b/antarest/study/storage/variantstudy/model/command/icommand.py @@ -110,7 +110,6 @@ def match_signature(self) -> str: """Returns the command signature.""" raise NotImplementedError() - @abstractmethod def match(self, other: "ICommand", equal: bool = False) -> bool: """ Indicate if the other command is the same type and targets the same element. @@ -121,7 +120,12 @@ def match(self, other: "ICommand", equal: bool = False) -> bool: Returns: True if the command match with the other else False """ - raise NotImplementedError() + if not isinstance(other, self.__class__): + return False + excluded_fields = set(ICommand.__fields__) + this_values = self.dict(exclude=excluded_fields) + that_values = other.dict(exclude=excluded_fields) + return this_values == that_values @abstractmethod def _create_diff(self, other: "ICommand") -> t.List["ICommand"]: diff --git a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py index f1218d86fc..8befaccac1 100644 --- a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py @@ -1,12 +1,17 @@ -from typing import Any, Dict, List, Optional, Tuple +import json +from typing import Any, Dict, List, Mapping, Optional, Tuple from antarest.core.model import JSON from antarest.matrixstore.model import MatrixData +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.variantstudy.business.utils_binding_constraint import apply_binding_constraint from antarest.study.storage.variantstudy.model.command.common import CommandName, CommandOutput -from antarest.study.storage.variantstudy.model.command.create_binding_constraint import AbstractBindingConstraintCommand +from antarest.study.storage.variantstudy.model.command.create_binding_constraint import ( + TERM_MATRICES, + AbstractBindingConstraintCommand, + create_binding_constraint_config, +) from 
antarest.study.storage.variantstudy.model.command.icommand import MATCH_SIGNATURE_SEPARATOR, ICommand from antarest.study.storage.variantstudy.model.model import CommandDTO @@ -33,77 +38,71 @@ class UpdateBindingConstraint(AbstractBindingConstraintCommand): def _apply_config(self, study_data: FileStudyTreeConfig) -> Tuple[CommandOutput, Dict[str, Any]]: return CommandOutput(status=True), {} + def _find_binding_config(self, binding_constraints: Mapping[str, JSON]) -> Optional[Tuple[str, JSON]]: + """ + Find the binding constraint with the given ID in the list of binding constraints, + and returns its index and configuration, or `None` if it does not exist. + """ + for index, binding_config in binding_constraints.items(): + if binding_config["id"] == self.id: + # convert to string because the index could be an integer + return str(index), binding_config + return None + def _apply(self, study_data: FileStudy) -> CommandOutput: binding_constraints = study_data.tree.get(["input", "bindingconstraints", "bindingconstraints"]) - binding: Optional[JSON] = None - new_key: Optional[str] = None - for key, binding_config in binding_constraints.items(): - if binding_config["id"] == self.id: - binding = binding_config - new_key = key - break - if binding is None or new_key is None: + index_and_cfg = self._find_binding_config(binding_constraints) + if index_and_cfg is None: return CommandOutput( status=False, - message="Failed to retrieve existing binding constraint", + message="The binding constraint with ID '{self.id}' does not exist", ) - # fmt: off - updated_matrices = [term for term in ["less_term_matrix", "equal_term_matrix", "greater_term_matrix"] if self.__getattribute__(term)] - self.validates_and_fills_matrices(specific_matrices=updated_matrices or None, version=study_data.config.version, create=False) - # fmt: on - - err_msg = apply_binding_constraint( - study_data, - binding_constraints, - new_key, - self.id, - binding["name"], - self.comments, - self.enabled, - self.time_step, - self.operator, - self.coeffs, - self.values, - self.less_term_matrix, - self.greater_term_matrix, - self.equal_term_matrix, - self.filter_year_by_year, - self.filter_synthesis, - self.group, + index, actual_cfg = index_and_cfg + + updated_matrices = [term for term in TERM_MATRICES if hasattr(self, term) and getattr(self, term)] + study_version = study_data.config.version + time_step = self.time_step or BindingConstraintFrequency(actual_cfg.get("type")) + self.validates_and_fills_matrices( + time_step=time_step, specific_matrices=updated_matrices or None, version=study_version, create=False ) - return CommandOutput(status=not err_msg, message=err_msg) + + study_version = study_data.config.version + props = create_binding_constraint_config(study_version, **self.dict()) + obj = json.loads(props.json(by_alias=True, exclude_unset=True)) + + updated_cfg = binding_constraints[index] + updated_cfg.update(obj) + + if self.coeffs: + # Remove terms which IDs contain a "%" or a "." in their name + term_ids = {k for k in updated_cfg if "%" in k or "." 
in k} + binding_constraints[index] = {k: v for k, v in updated_cfg.items() if k not in term_ids} + + return super().apply_binding_constraint(study_data, binding_constraints, index, self.id) def to_dto(self) -> CommandDTO: - dto = super().to_dto() - dto.args["id"] = self.id # type: ignore - return dto + matrices = ["values"] + TERM_MATRICES + matrix_service = self.command_context.matrix_service + + excluded_fields = frozenset(ICommand.__fields__) + json_command = json.loads(self.json(exclude=excluded_fields, exclude_none=True)) + for key in json_command: + if key in matrices: + json_command[key] = matrix_service.get_matrix_id(json_command[key]) + + return CommandDTO(action=self.command_name.value, args=json_command, version=self.version) def match_signature(self) -> str: return str(self.command_name.value + MATCH_SIGNATURE_SEPARATOR + self.id) - def match(self, other: ICommand, equal: bool = False) -> bool: - if not isinstance(other, UpdateBindingConstraint): - return False - simple_match = self.id == other.id - if not equal: - return simple_match - return ( - simple_match - and self.enabled == other.enabled - and self.time_step == other.time_step - and self.operator == other.operator - and self.coeffs == other.coeffs - and self.values == other.values - and self.less_term_matrix == other.less_term_matrix - and self.greater_term_matrix == other.greater_term_matrix - and self.equal_term_matrix == other.equal_term_matrix - and self.comments == other.comments - and self.group == other.group - and self.filter_synthesis == other.filter_synthesis - and self.filter_year_by_year == other.filter_year_by_year - ) - def _create_diff(self, other: "ICommand") -> List["ICommand"]: return [other] + + def match(self, other: "ICommand", equal: bool = False) -> bool: + if not isinstance(other, self.__class__): + return False + if not equal: + return self.id == other.id + return super().match(other, equal) diff --git a/antarest/study/storage/variantstudy/model/interfaces.py b/antarest/study/storage/variantstudy/model/interfaces.py index 93def93234..31b14fabd7 100644 --- a/antarest/study/storage/variantstudy/model/interfaces.py +++ b/antarest/study/storage/variantstudy/model/interfaces.py @@ -62,7 +62,7 @@ def generate_update_config( raise NotImplementedError() @abstractmethod - def generate_update_rawfile( + def generate_update_raw_file( self, study_tree: FileStudyTree, url: List[str], diff --git a/antarest/study/storage/variantstudy/variant_command_generator.py b/antarest/study/storage/variantstudy/variant_command_generator.py index a08ec86b4c..7e56f370ec 100644 --- a/antarest/study/storage/variantstudy/variant_command_generator.py +++ b/antarest/study/storage/variantstudy/variant_command_generator.py @@ -81,6 +81,11 @@ def _generate( cmd_notifier.index = index stopwatch.log_elapsed(cmd_notifier) + # stop variant generation as soon as a command fails + if not output.status: + logger.error(f"Command {cmd.command_name} failed: {output.message}") + break + results.success = all(detail["status"] for detail in results.details) # type: ignore data_type = isinstance(data, FileStudy) diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index d75488f49e..2895905f69 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -1,6 +1,5 @@ import enum import logging -import warnings from http import HTTPStatus from typing import Any, Dict, List, Mapping, Optional, Sequence, Union, cast @@ -980,7 +979,7 @@ def 
get_binding_constraint( def update_binding_constraint( uuid: str, binding_constraint_id: str, - data: Union[BCKeyValueType, ConstraintInput], + data: ConstraintInput, current_user: JWTUser = Depends(auth.get_current_user), ) -> ConstraintOutput: logger.info( @@ -989,18 +988,6 @@ def update_binding_constraint( ) params = RequestParameters(user=current_user) study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, params) - - if isinstance(data, dict): - warnings.warn( - "Using key / value format for binding constraint data is deprecated." - " Please use the ConstraintInput format instead.", - DeprecationWarning, - ) - _obj = {data["key"]: data["value"]} - if "filterByYear" in _obj: - _obj["filterYearByYear"] = _obj.pop("filterByYear") - data = ConstraintInput(**_obj) - return study_service.binding_constraint_manager.update_binding_constraint(study, binding_constraint_id, data) @bp.get( diff --git a/antarest/tools/lib.py b/antarest/tools/lib.py index c3c5db9dff..60e3215f5b 100644 --- a/antarest/tools/lib.py +++ b/antarest/tools/lib.py @@ -94,8 +94,6 @@ def apply_commands( res = self.session.post(self.build_url("/v1/matrix"), json=matrix_data) res.raise_for_status() matrix_id = res.json() - # file_name = matrix_file.with_suffix("").name - # assert matrix_id == file_name, f"{matrix_id} != {file_name}" matrix_dataset.append(matrix_id) # TODO could create a dataset from theses matrices using "variant_" as name @@ -117,10 +115,14 @@ def apply_commands( res.raise_for_status() stopwatch.log_elapsed(lambda x: logger.info(f"Generation done in {x}s")) - task_result = TaskDTO.parse_obj(res.json()) - assert task_result.result is not None + task_result = TaskDTO(**res.json()) - return GenerationResultInfoDTO.parse_raw(task_result.result.return_value or "") + if task_result.result is None or task_result.result.return_value is None: # pragma: no cover + # This should not happen, but if it does, we return a failed result + return GenerationResultInfoDTO(success=False, details=[]) + + info = json.loads(task_result.result.return_value) + return GenerationResultInfoDTO(**info) def build_url(self, url: str) -> str: return url if self.host is None else f"{self.host.strip('/')}/{url.strip('/')}" @@ -164,15 +166,15 @@ def apply_commands(self, commands: List[CommandDTO], matrices_dir: Path) -> Gene ) command_objs: List[List[ICommand]] = [] - logger.info("Parsing command objects") + logger.info("Parsing command objects...") command_objs.extend(command_factory.to_command(command_block) for command_block in commands) stopwatch.log_elapsed(lambda x: logger.info(f"Command objects parsed in {x}s")) result = generator.generate(command_objs, self.output_path, delete_on_failure=False) if result.success: # sourcery skip: extract-method - logger.info("Building new study tree") + logger.info("Building new study tree...") study = study_factory.create_from_fs(self.output_path, study_id="", use_cache=False) - logger.info("Denormalizing study") + logger.info("Denormalize study...") stopwatch.reset_current() study.tree.denormalize() stopwatch.log_elapsed(lambda x: logger.info(f"Denormalized done in {x}s")) @@ -323,7 +325,7 @@ def parse_commands(file: Path) -> List[CommandDTO]: json_commands = json.load(fh) stopwatch.log_elapsed(lambda x: logger.info(f"Script file read in {x}s")) - commands: List[CommandDTO] = [CommandDTO.parse_obj(command) for command in json_commands] + commands: List[CommandDTO] = [CommandDTO(**command) for command in json_commands] stopwatch.log_elapsed(lambda x: logger.info(f"Script commands 
parsed in {x}s")) return commands diff --git a/tests/integration/assets/base_study.zip b/tests/integration/assets/base_study.zip index 8a79496138a93221777dcb7a3934f473e015cb44..3b282beb1693044a666fc204fee083eb453c18ba 100644 GIT binary patch delta 18238 zcmaicc|exM_P;aF10w4SxPsyvi6SbZ;sUu~=86ffxgnTZE~Ql0ESJPm(_Emqa#BmF zZfR!amTz9uxY=edVD5^FT3K#Twp^F=_nDbzdtbf3@BPEx=WKK4%$b=p=N#@Yha(Oh zjwpD99xK&cg^X<}^qd?^?k3!y-ptWsDKm=&(9Pz2l&5AQ*BA|K!MD?mH7ugA;6^RV z8K(!*kK^=44p1wGx|1;AEofe^xD+B8LvYdVz^G3kLmF=xhbzfo#Oa#FaBUE z7>jWPzX$xB(T>==hM4B&NlN;n`yhSH7)Wo{)Rg|cn=kN^8AuhQp;H+_=0gpI%$H*L z4mw?*&!XLhhVF8XIn6N9pk_RgUYV%7h>BUbn!H4OHBnEZ+(f>H+BD-ksoiM3CjHQq z*QMOidK22vfKR91^O;MULW)~FUXL#d)q*{TnsS{{t)_e%1r)Fday4hQgy10R^tc}T z+Jykm<|e#4qw9_Nr?mDPExV3Zty(~}XA9;HZ8&Re%!}2kRqM#ARZFc#{f}yqRQe76 ziCU-Ep8eteje%dJ1%Lf(r;Mi-z4LwNypJp19`V$t6D#I!|5xKW|C%yo@`j<+*Cful zRq@iDvaC6e{wFuMNzsbT_?n0G{Oyq+1~gq-Z)NtEu8SQ%8TM#lM=c|M&lxF7I^trP-+tbtyk+of!U z_$Qchc4J)kE;hvRS@s!&LZ7n%-mtq!)^GD7sod+3k}r6>Q)xS~O}=I=sP8u{jaGWt zMvDKEeM!f5u!m@RYdwxicWNQjZzt;~UtzzjsNRoDKwc_6S5t3E6W?VkY4%&}b-J~NJ)!8z>gyqtyOH@(&$@c7oTD^azm4q6qV@1x zzYkd}%H6<{i=wf0USE$1FMS{LzrBu)rTq;sfS2KesVoX~#ed@{gzQKfY>)&Bwl`>E z{r`g_oGPQ$YD$g{v~(>7%6i|{!k?BzDIt;~nI^{K+!U)LHTN1nga+TQxA_067((T- zio_7N`Xe@;{<`n~&@+TU=T9#fbduMwy|Afg;)e_u-l(O_B@ed3+;W_2hc)9)+ zNntH`IAvC7L7u!s)+&$&hO%r5tBaXtmuqo_cgkyc8tUw3fG4Ofd$b01IH|d4@Jldk zQe8-$cG92;4`Vk1Jl`a+6kM>OtYzWHHdJ!5!VT7ME7VD@PIs$b4(Q`+M^ z&J+?y!yn_#ElI6tcyGNHof*d;5;EJF-?G(xl(Ykp3eFBcn)$lkKn^%cYoWlm z`eU17&e27eacy8RS}FLB-aLd>Wob`|buegy`XgkkT&_oZa@K3_aC+x;1U2tKL3F2B1T(?3bEDRp_Pm3$uXrr4 z?>Im*-^Oa3ejDfi-H6nCz-rOux9&+?i+XL)9;cR@wKn9Qh*-luBrrxeeYr{NPB`*v zk^HQJLY^nDv0;IUal7R7efpm|EuOO+6!P@HcDF z)@=22S`AOHJ8Uy{`6Yxsv#&vwk~Mk_YVocN_-Mm)rh7^@YhUa1@iwh3m0gCgCEIYG z^xdY7q3Q2=4Jr{eELg=hc*ugCT8yX5E^Puf%`Pp8Qr2Q*1Ox+gPt`VUzHSF$MQ515 zC-(q*im5xIM5vTC*j9vAUt(|3jI+3fa<9OrN*=-5b*aRivi3ZS@btUFUSpJYncX57 zF@lQUXSEc!`cMlkH^xgx`T%so$*OTzCk_$L1q?tZ=N`wYSFydV{j2B0+mjz>K@nUQh zA3bKZ=3(^tS?Go%$T@qQDZ#X|oHd~~2iaMl0Y(i%m@#vJW3?Op#1ct@_g=&rKYv!Z z!;erDutaO!lNHI8fqI-L?`PJ7(ft?LZCf~{{lp?E?;PS6sXL?=Te80Jd~}{6+*)_e zWD!vsoF;;H^mV8L$4jK959@8@Sp^T=BVCT*aA)_CXbLMQ zUMf|ExSe_FJkY|lDVcjfE}~Eq#l)U93od1 zvJZuS{zF)&0TnO6n-d(#plO_X0vkd@Z!5oY&4JuzFSCito3V0@sNx2~ zg&8j?k7__^b3y;}E$PT4=|dNoRH5_P17f7B4*8(3L+-wC@QMX+8VHvnvDI!OOv3gL zq)d@iz^sk^f+@MF>c4P_%Io+C3t;eGT?Aee(Ngg)nhRdJ%}lmHTKn(vJ0QYLZ^sdj z8Uq>8bjxQ1vAB&ft_-P-khtSWNR}EPt`4U1WZpzv$;RNgiBrjE@Daxmzu#-{q*jQo zm$ng6xHWjxGiDSZ$L&21m*Iw|p^J>i?ZL~ERZuU-ji98b@K#S6i8$1pbOhxj^AyKa zF`7RePV$=S?bZn0HjFCm!s99MWm{MtxCBl+Vrd4WPaNH34#M#wJN{9aZ!e4xCS3y+*QTe$3a7!5gS9=PHz z1~{xR?L<-GC#=Wj8-5em=0Dgx&B_*r6?P7Yq=fI~mfqYjVC;dy>3-7{8mFf~*C!2jHRka^(gx5 zAfTBFY9-1O=*~pl&EGY_EjXTAXS6^%IZ2P-a!NjhhPWv)F* zN$TWpQTL#X#&FV-u@KU|x&_AJ5{su!?g0%B@h9{xE7n7KwE&ACklK&IC{nQzPB4=6 zhGG+h(qn2kSl$*aa*Fi>9bhEvTN85iF1k2I59HIFxG4=G4i1i#7Z*$_z5&)w1Gg_#q14QwfAr5A< z6p%`hVvS=$y>U3PYGVmfM}zN#6E|fvbji4E8g5OPgAav*c7!5Fykli3Eo`aDNHDj6XG5QP8QuDJ_tHWfREqLK2;N9J(}&u=s+g&UiglWEuVG zNr8)xv~W1Ogk8?8#ZVOn80Chx35<8qYW|O^L?grc?z}B?9;~TkSv87Afz{j+&#Kj} zmbd)+p{HLy!F>Nqnd|ffUcZ*&&0nYYQ2V9-`_SV`9fwGIUw=UCt_ZdBc19_jEHkVM z#yhRlA2^&{$j;1xEhD29fmw4f*0s)7=Jc9pY^rX`{r5M9Vg9ti;S}0LPocpAJfprt z=CZ~Bm&XBIt$_pFVl!*%@X}{DAv(Go2ZiQ2sn%{XsgmO{)4&*qgy-V*wp3gTFYM2o zKzhCtcc%$(sEpKfcz(@>4(89c;FW}>tWP%%;D90nXI4*Q$sLQsYf%uk@HHD(n_77A z49rC-g%5QrabQ8HT8fJ-meSt1B!7Xud`>x)An#=z~m3y0E7 zDs5=3*W;t^_vJ&0#=u>_3pc5Q!pV9~DF0_8U*_#?^$wJsh-+#=JIp;X&OuPwOb?}B zIxB)G{;dxV&w^xF9n3hx8hi6ah>{PkV6pWj!R@;UwkDd)%bR#J_f#y&@!sH2LQ$xx zLuTn+1j~0j2#hIBZ0aipbtGin+e2+^4|%Z2HO)xSyVB0)0RQ0xXMc$-kerCbZENOW 
z3EPQC1%;=!if>kPZ=92_YYT5&2%#?3R;Uq7{Ze6izeI0>8lpaBOxLFR>n^H-RO~l7 zn_pV`N+@rSnHIOg5G}f3h!aVQAhU};fRdWyv|iLoL0sSAoq!S-%2}=8)SX&8z?H2q z0E9&GN+%e_F9ZBm8wWLtV1|%^$qq0oVg|KeTMOJYaa7cGEsB_)`=Y7G_$<+*cXScTq8;iPHTSa)%1{;KrbZ`)&&IVT%{0D7t ztSGvnnkfd#NU^A6Mcu8bL$^In9Ex!9jzeyLFqnI!TFm~Wh3Er$?gI)UN_5ma4KdBy z5D@Nlf(w>2yvoGY(=Zf!sO$xpVQohTA4+?uu{XyKb=SuF_Nobj8l4;jE8c?y85Ufv zi=(u9X9q5^9xe?Cj^S?_;DQLf3zdkG*1q(h`BxoiO0?x+qCtiKJSQ zdU8=+J))>9#;=oRjS((Niir-1&pUBhq8MpQyy3*5?%2+f_-a>&MC2%)60bRNC`jVw z7jc@@@gEO5_)wj+B?fh~Fp)Ga6p_C@>nCkoPs-^EVaq}>;j1<{l8%NtCOq#UOIU~~ zZnjp!up&|Xf)j^Ir;KWYDXGdHO(yT$GTp(uUk1Oocx>UA>|cwf2JQH^nari7r5-AIM? zQflW}iVj6>L+TS^_&4S&;=r4R(99DMIxAlhFF66BCB@kHTMGmvgK8cFQ~0amqakG+ z(+2V>E96JvbJr88;HZ|s*RE99lSh#hPI?{h1NnAc_%8u&P&mE3kvecbI|5NZC@B4L zL`$WD0&K71qqvq@R3MBFdQWj2E=T&&dOu$JNI>gZ74<&6$Hx{VnF{_!6fOtcXn;{) zk1wi&t8Dmb3>dsY5$B)Q(&)}(nA$W0(yTfBMdl6UGho^&P#-XG>rY_?XCBa8uG+@? zN<6au?)tz+A+07#^17(h_u=^#Ac#<-H_p}*s8~4O4i_Cn*IV+Il(`Jy!LCiFtRQN- z3TNp3n{B}IRk%T2$MDYDNjQGKaiC`G|`DJDsX3G;V|i77{B4J``MO6@rYu1Zuy(W`*AL`&^+UO1z1 z0qITj9Pp>BE18l{b}{(qe$7dM{40Vb5y4dEV&cfgNLxu}NaIY(B5%qnUs3A2y}qc$05&x9cTh+hjOS^g4(f)B5K-g2aq~?EIBz&|HE8lc|Hvx5zgMq- zyF`X5mZVYG|I%96P{Gt#;662Ql27J|<+5X2MezwP#Rpt_b^Oj=iJde)(gH_Nmr&de zRV(2@F*P0FCkE;@i*}kmxjkE<(A^WPIz`OJk|xTv%a%bOscZclUs9{t#8>tI+(jz; zPi+049_=g3Y({CK(jIGG$1I;3>rI8!pO{XQao%8m5-DssCwX*zxk2GeCLzlXTgUT8 z8x_R;1n*+$im=H5gD{+K6GB>f=6fGp&uy}#h11SX$n7U>Go7|y5wv{v69>W4B7F#N zw$%Yyb;i&QN0)IIe8cFz*Yvuy;a_M(K;ukD6%NF53mhz3Xa?}bPRNP3_3ol)C6xc= z1V?Su@CY?FS`>d&a|ro(gBK2ESaaN9n(`K&XB~740>c!D+ffYgjtveL{WSIfMwA=s zXYnC7yb&c_c_mh*;n?Xh5)z*Cqo>B=%@mV^zLz(zA%>P`-JWYmg@tD0BrUTE!s+j8 z2)fdS3;T%SmxWo(;q=ugY^2QL@Ir~J=5pZrj=~%!IUy}>BSf?%9dY7@XF*bBHkPbw zxFzHv%FV_mtsZSb!bKlXclt<7V_zc&xN|l#idBMLCtp0G4yXRZ`uWH= z8@3uaCb8X@$Zp=k4oKjAGkk=gk;^%W{l0iKbvY-|KGTOE&0VNrcZGe|_@ZQcS4AHb zH>K-D_4i`M)5`LincgUWTDlmAq-^z)T89kqV)dultr4pFcul@95h9a7(E?_;yo^!W zAH+$U5rBR4UtjV`2l3Lhmt*`udrbF(2<+t~PCB4w7a4LHVkTGm!V}gG!b)};WV4z* zWr<=RC-C#$sLo>FM)8e#a8pIFCKx6u~!ETm8@1%8qOx+ z?6#g;ZGx^!y^+HlEHXHeMwUt@40qrVm$XtL$#)rg6x&Cy&cy*H+uu-2AHs)^bPyvB zF*2F*CX^YYeTu+rf9t~&vm}QEBtER1N~g5`Fsgm{9aKbAd=|wQd8FUt+$$bsOLA2p zu`5rGCmx2OAIvdPoz+syvu?iyjUdD&PF>|0pEla$LJjygbQhhe#6h3*9>Ud;!?E6P zc;iDvFWvwi@TeD82rc;4yo7Uyf@`WT{*7U{)z0}sKdSQL${6D#^}V6s`pg%KW zmt!>@>&4YhOg@1Zc|$|!579`7yQx0aJ&U(JA1ApewejYQD_#-U^~45ev}f7WUNf+tPX^QlEplvDrpP{mUqAkC+<)_@}KffwV4AaIGF2IM|LjP3CXXn58@RzWHj$F(PF4WZ~9{e zx7rZ2T`%?;zy2hPx)!bRL+Uv{5c6MbfH6t@X-*&xAip4hf3v}LC?g27lx1%@%Lz{^ zNzyJCY9L=%>V#wk;)*|W7B5Cwn(Q&fL7i5B!!SOcSc;Dhs{bp?1$&Z&WBO-P90 z2~R0zENB>&r(i*gC%}o(x2Ic~kH5PvP*g>~qf79OUy)owLs%%k zb;5}=J6DrPTB+MI2cN{X82LX;)w{@>w3D*ev=s`R_lwrYVWtWbd{OIbxm6TDx6-1H zA~flCprpUxSJE;u#nu4n!cz@{EVjW7sopOzcHR_3<6VBnBGX$Ii!=_J;6;KIM-&c( zca5RWX_;zI41P~Rus7?`%oPNgKewSYuK5q=GqkA;^tY`GFz! z2dy_{{0Jo;S#Lq=QFM2h&vg^Z@BTq-(+?duG~6NyCM3yyRJhR?;Jg+?>r;g*OvTx| z_dNKjZF1nSi9#r^3szkU#?0^9Nr9U!`XQn>$IZuj6r^At4kGmG-uoc>!k|jhaBaM@ z5LCIy-Km+zeX1l#9J+X$kb41eJO2xRTjGO5*Xv?j5Vo1nr7EA{V6I2#Z%rciO{guI zC)l`bWr|Zaq^9TwZmx!Mql3{pHMh*^^KVP0qza@AE((&DvD&)~T-wX{n-{u_1XS@Y zfSYVrEE4?T0&@i{KWBRh_Z(WF+o|1k z(8h(S7U$=MkJ;YS?mrf@K_Hu>vF-ZsRrkU@KuOoJVi8|>^U3aLr}s?vA;?rNbkzs! 
zN9B3uY<&D^#4hh4BxzvLe=)av)HV+7qR}rPXSU=C)HUM26dw&~RZFv`5&tBD-j@up z%sjx068NyM6d_uwQB4v1+Vvs`w;ABDLC6s1%m885*A}7d$40VY77{(xXCOM;_>BW6 z(+lX!Hr`sVyohk*k-ZKAq!)(p3MXWB4&n=RI-~dXS!5Ra?~~N%f9HPLbO`Pi8I#CAKY&;_!CMO@kLXh@Gb{XmMhToeOI}nS*gt_s}=5PPmtD5ZXOyNpMB{)mPqR%xUoV4!*48sx8^D+BXKy><&jYQ7os_F8-k6lQ^`Bqg_qd z?NcY%Y#DE+R7^SKU_sZovt;s?6Ia;=#&1mR5Jd=Q(Yp$b}!mE*-dU-B2v5a zPFz|pj3l*t=7=TEUlbqE20lY%5N5*dWVPgXCxLw8ODo#luXY(?XOnV5ei$e>R5lq+|D(o5D!)uQG6GQ%@8-qdpy*h52HO1;bmRU2;2<5TmCnWMBTx$3m&KtD#xiPDRCxzS9%+^r=iIkk|0Zk8M!k6 zmYQ@utN21*fFW;A2e@+{6f8QYz_N_s?rv%#Yp26z!Y4ajPy~3iqoqo0gKrFo+5Zkl z6G4#OKAQ5gU}(>lcSmI`G?V?JSh62P*SFj!P!VPIiw;SsA;eMD^H`+z zl@_>_psr5~reiK8HrP)*(+#ub&V|KWRngh`&)$!;JERSxlivfp3txQ>Y9qApKRB&D2>fXL2soq- zq9diiZ~cQ8=KIwu+=n|%2IQd!v=}YmPfjflXzBPr_rSZTd6^caP5Vm`Hbl7^v8L*D z)o$I8P;!5-4bq0*v_1l<=zCQECHtF#edk^cd5XHeuwag6H0<9$7#RO?cdlpT7+eS zR#oV$?H-a$OMESMt)MWPHaAh@v>4T|vf-wSe)L5XwGsISqTTW)YJe8K@ppPZ$P%j6 zl;&$qH`%m3FGf?lCTgo)ix&A+MyyzArB_4MMz8vYz_3+|obS-O4z*S8dGVy5wx+NR zJ>5`k_39%3$~%RB`&BMlo@t@iLe&PkxQ$MQsx$C;S~SM^@St- zE8m+3;V(WwKd%po(6&~rqT}!5qo6Q?YQxn;?y=UWVj7iA)2Uj_jy2r4z4H2yfmSWR zVyDQ7?6NSyW7v#}=Ck_r$RssZ#71itVW(fJKreVwwb74H7<9@y%XNf`^&4)yV&L$9%Tz3qPvz$s3rnsR2P>E$e$N)Dk}_+zKc zQWiroYniXk9X(CG$0=&EoV9n$RMVvA^v?jQTFp}F_8exTlEo~}$<3(FE2|l^xP^Qv zY9-5{^UpJfFyVZqjNMS^nd!)j7YDNt%9@N=2nQCeWJk4qJ3^`SHN>cP8GI_Vs6J%d zkFHF44q+5ssd_{Wq{32krEVFT-@X9N#}uoo6J6=8I^k@9Xub<|e;F-v_ndGqStWSZ zUbhR54{dvy9p-+{bi%tt)!QfGK*00tD7`ifQ$dU^#@t4qJ<2-q5sjn#$#Ab=s@j^T z50%ensuS)EQF+8dsbW66#qEbu!C^gLY~+}OhL6u?sZRLSPPi0G_2;nbl3>+jHH1b? zMglp;pt+YPW0=lw!Z4vZt8>jP_Pgp%!vU_m8_zvzM&m=k%A8(kZ>r8f)^pT$`7+1{4XC`x=2kX`-iMI%{|OE@D?8D0gt2g&#J!E zriyLnx^4B(^sNXjH%z66*b@}q2(W7Jg4nvZ;2_!!x>k$ zmvwi$jX`{$ZF9TFnLbqYwC-XT*Ow1ss zCZE6a+zj@l#fjQ*!6Hflq+2>yO=24@H0>sf)}kYAbozo4EAn9XEu}%_gid3bw!T-a zR`GzU<@Ri?-RikgtKYq&wykHjwyMVjqzzNGW!=+h_Fbj5R{KB(=hEMI@KN|cGl+Dg z=zkPmN^L?ckaG+%ZC@5Q(owt6t+mV03g|xrVyIS|6QO1GtgqQ~d@A?!J803C_e$6H z^>Jud`mWS2_qhw9wlu<`7d@)a)CQWZR}}SSssjzQ)QbBBLaZ%)W&t6$?;6=C<@}{2 zh+8}z@wlm=Qt*A~?IZzb6<60BYI;j2o7=ICEv+mzD@;oyG~ojTFel;Rw8*q>%Fzo^cg}nz>v-MknKR( zH&+I@4epeD@JG?K; zV8`8sHT3XJ)|}Sop}N_7$Xk+;<5&b$^~b94H0Z|6Rf&4C%b1Utb=g>KMh;}H>3kse zIHh?ge#i2$F|2)vMV1Y~S5ASNCcbF%AU1+(?btd*3}Vkx!~iTzs|T`q)NdRMHLa~0 z0Bb03@w)M!`pee3ti-}!F&fN*wpbZqvLyu-U@>voO{b;#tiWwNmcI74G$eaI>*di* z7@f<-M#j}l7_IfQbfmgG_NZhyFVDkpmy_7dZXcpXP>hcymKqOG8*u+)X=@F;PZJ?1 zuk|S^oBQhS&D3fvCupWUwQ6y1Pj>V{ML^{2JVNPY&GG2T+-$^8#Hu!@A=M7WmMnNI z*00iG`dYt@`WLa=+<#v>G!)yG_s6nVv6mSEd%uif`^^9d-O}OMQ++jx-EnKA5@^13 zX)X2%{|sg0gzo3mg>NHjhO^f^420>stCrbnGqG%qebBJvMBD3yYC4rZ$bA{8CQ{x| zHp^`(R%a|6!lrw5sJs_DP2csEyFutiEP>(UoYRL*GdrXc>5Y#O2r(>ZL)d$SmOq3X z`p+meo=0;x?SF{PpaBnIt5Pw6jnqdNe#oJ}k*f13K#lBdN}gP6?oi=arUPZ9ssgg$=Dt2=%vMZ=FC{ zrxjl+J&XTqbyiIL8hTb~q{Y~O7kgctiW&+*?V7Ev);ee}XwL_KxS|eq5Wqk(QAULlbVJ}#)uA-_ONAwS@)YoucSx^f&NmQ2d8!VD460u`U z{Q^bHdWLnTlHU;3ps(2QvR_eN3Qn*z${U2)y8IiIFELwZ*I-~$%2Ws0td=BN`~~Yk zwXd4ny7O)u@$`$;Vx#aItd(x5M#GlTG$IO0T`dfomU3(Knq@EhoK;JfJ~+uTOqWnV z3TR#pTdX%lw_Q5Nxj(S|jIRg5=&hUD%(1_zGaK&qktz6&B>@Oimhdx2| zsQ8wV>V)(N3vE4v!yyEHX;~Yb@X-4ax^mrW(8Muo{p&^{$i5_-A(&}SLTroOVx_>< zYJ~I7IhHFNNz*x~5P?pNRYTFzdVYX4dNBJV^kCdER}bKrs|T|`<~_))!_2n-irF~t z2AfQ_8*C;mdJ|I;k$ixxZ(=_4m}-t84XN~3p4)Gs79R_Zx};47H?d8`Pw- ziO4bJV?!#BwS#1My46xZ@A_L7^IMV(vd3d^wzUwRinB{|s_=z7SnJ-Aj0jqO9?p9) z@mSnkem=u3rBP&@E3aJss8k(ePMqfCXs+V48msSojj1^{$kLpUzMZ9Zgxs1KaEd8!;ekED7iR3G&VzxcUIdi z8yJ07gdNGZJ1y(zWnV<2#G(3%yNz2xmU!p936_DvLL+DCILpsMdN(B(S=Q^)1A-BE zk=R0fR+vkiAl{5P+lhq!k5Eh#-+ZCNl044xD7TZsX|uR7xeHk_*f0HMBoC$Tw4`Yj zP9K^Yg~`8d2+N`s(Kt^v>4A$4E}6O%QQc9*0;o33GMswvu{7uMl~eal$d$S;WQ(Hj 
z>A4UV3#2x3MFcr^Aw9T5t~^$$ieYQ_X-pP< zY8dMwCR2GkxHrHDyA+=4CMmNW<`9H92~Q7N&A<+Kbm_iZ)nBPy3g7q zf7&nQ=-~h4rJSfe+TUXR{$G9@e~54S{aereyXWGZ!TWNO1G3Jg-^g7uD|z3IfI&+_ zb8jpuiQW9d#+NfkH{FJFg>COG` zH0k})=*A7dn>{dX%aVEpxr$mD)3BP+_((OJR-aJv_(f~Dl3lMJC2Uc9QuW)Io|jK3 z;mUXQ>aCd6fgGFENPhpS2BJUWfB*DYW2r~g@2Cyw%1wl&>ZD?&lW(h0$}cYZ=09-R zS9(gZQH#x>&+;+k>y9ZII(?Ue5+wg;%>aSfdjU#)88br4nL*b6Z;@8gYs5~7*7|eN>u1jh;D_T+c6>O)TFua46rusH-rBflo#pOpAi*AU&h8;Wl2Qs~z>96!)e ze}1doff5>lY!5Wa+MeYOsy&0e=+3Q)*0O^n{>(z7waZ~peM@3dMHouu(pm9({)iSW zO-xnJ?@Y79(6_?oo@8IGrqECu)c|HcNS64oH&DCKx~JexehB*ZzgB{W;q$?bdV=0{ z`y1HU{TTt%1+Cp|N{v*z)73Ld6nkHy;^w`8HB2$ZK=9T2R-~5A!$cB2E-|^dl24@C zB^b=d{bByR2gjFYgkxY3swmbcRrk3dRJCfsLycL7F^OX3GV9@?Nn{U2%9M0)EA*m@ zvFP92G3ejh9XUCS9Q&9bWlqFgD!!=%(d3EDZgWlW_nx^UOO;N<#cIqX*j$&XD~tPW zJQK7oyNG;>oCmwN4OZ2Y$N;<)bfKnGK`!j1YY1{)rG9-IOED|ZWASznF%443yreo( z>05C4t@+rTte&b`SsMRqQnhq#{Z?dCP5H#Q+u!fm`v$f4@e$1OFD_2{dAWX)Cw>q&)s zf0ZhV;oT#-2G8|4;Cj=*OU4OQP_&6y&O|sq_m*_Oa+r||XIcHd?0LU-6wg+h)3V3a z7`CS1KBDPmGm((E5I{+5 zSfbfN-h-Ze@r064HVgG}&pj+Wrx>e*XyBDc(ZGL4c^kmfiWsd({lRP&~(g!pjilC`orG5 z{xj6B6jY3*Ld3UdhPS|H(^0{#^N>E19&z&;@Ry=k5lb<@Kb2W{HR?nNV>oL+Q_=w0 z%3lVJULkm>{CHQ=r*`RM-lBQCaCcHSpGAoDF{k(H$0bRJ@#th`sg2gm!QkQLMie;I zs0RAeRQ1DZN4=*=RU1WcsjhQ9rBb}w>h_eIKb~zeKbjKWsuJblnFW*0X2ayW^V}*; zsr)f?SJcci3w1IIZZOq8CD+XMB@#wa9*GIKMi#&$obHRgQ@t413RLgCXZ)X+tU$<0?;Jr3uJUZxE7xPG|S)4agOx`-{4C$p<*m9a?~OO>|*{Nsi`k zwmy_~1W%Pbre6IBS>!g5?d|;%lh=^il&JaacP|R&cnO9=ROyFba$9Q1UmfMXelGL2 zF2qjA#+R_ZFY78uz6Rv8*JCI|a&3_X@K~u2g-EH!izQvKyaphOm6`&brEI!olAH3E zO*xUKFOh7z-Z}L&)>HAcmp~hG^uQ{Gi=!acZ>d|^j~tV56INP?_2V5cvOhV-qpDRs zq~@@oS3Fe&uanrd)Znd>wijYyUT}|4%v8*AZ2N<#?jgSR+)u!lmU1cKo@r~yGVcZ} z9>#5R!NUl>Y4H8~Nu_$*N?v{+TPlCw6?vdhMsz;!ecMir~_L5zgCEjnAP>LE^>L3I2d zDB0G!nhvA6ca^r1y$urUpF5oOtrDu+ihDz7QOQ3zm5#_V(oPtBQDRZBP~+!L;bF6N zZZ$aWwzf_u!!!PJWaOPNR&Tw_JK&nSx7LrA!W2oI?%;@$jm}1~Cgpk~F{(P6%{4zl zwZ_?4JWe6&E~uDrv;XCm3#BL@+yR)e3VhSmZXf^vVwze>9MGOnpgL1xhxc>nZY{Tf z+1ON<1u+eXug!X?cokdCN-vhcgT%6oM01RTw{=%(=NRk2X2o;Vg7{e;ek1&oq zNaK!$Qi(q-L~M2oa_|8j@A!e+?CFapKZJu4`m-xCrAdIAAz2%|#ceHw)>}ExogD{d z-lu-B6!Wertz`{JD)YmJV2MQJAO_jQ|0EtD4RQQ98m{+a_*T;v75I60bwuK6Z}d-c z_c4m4ZWC$(ssJoRK3gskfQ1;w{*(xB+Oat_V>5ocsVwji*uVHtQIOi3edNjNAjQbB z!%GV&3XF(5PHQAShg8sQ8w~yZl~Ds#TKN^SeFN$o>rs0wWT|{P%cr029HbFfJ0=Qa2}% zp8QtHVOQLQM)dQy3Lt&g6`Q@I$7VFo=`t5hy{{@AwKdJ{^!|6~ZMVHHP9rM%P6-9% z1eYZk0ANmn*jbaS#dNd-e}YG#EAV{iKDU~-BPj@g6ub%X%ej;~K3D6YTW5&WWMu2C z`BxE(H{W+D_T$ZjXlt7X>z+*7@8TGqER2;ru^e!7;R!AYxfk;#BD~;mmrTinCLicJ zH5qXn{L+qu2S*KRaY_rIVB)>JSa#6ejvkNyHkZ6eI^^c^c+jw4ux{S*#XW_%0j;=# z9&VImcxd*tyeBkSRc^-svJoM*>(1RUINZ5_dz{;I*xkJDT(<1ozz^MA;T+&O?MS$E zT;gz-BJOo*$`RA0i)pe;;s17-ZNc$8?I_|q^zx#cYK+#;afv*X$!yL?F8M~9y%U!9 zD@r8I`577Uo5b0X)ZU#i;p~-)NX>q?|o3U~X&Wi}dcAkmhTJZ=8t;u`K0 z#7V<;)Z`*EHmy8U3`sKU^Ql{nr!i&x?2$m<80?yi7!EPYf>_$ zgu4-*BDqv#MZNBF3*mpF+GJJyuHaVn3g)U9nL3jt;&An{0=!=xI=i^WZ2Rdty$FSp z*rawoDOemd~oS%Nj!iP zhO<>N1<nQeTwVm%fl}0y!lgSALRtfKd%+BfpfS1(d3&0G3P_kIMqURmm>4|4LE@$Vzr` zugntw3Dd>wGj2VBx{^<4@5;QT@z@^}^~K5?aaK|Z@V7J|FF%)=0=>8jkc}czCOM3{ zoz!tbt@J3)Jtx`RUIXyF>7w<#o99MtHIojJ(qV# z`%{M_sZQQ2FdX}}F_*mcg2&-~5y_a$s_)_9vE54Ndvb&XAA{}>!tkTtcv>)aRPZ>8 zb^BK00c$27DGG){x!g+tk87iGQ6lDo8SRwsJk4NA4^y$_>V~_B(w?f#p5(EkD8EOX zBislGw+P*G;Se<4^LuZj>qfxAieWg{1N^4a^})En2>HQVN&sTaRC1aECPFTOrVc{) zn_hM+hf&2KgnN@gu@A?CIeHoC<$BXAZjFwTuLpi~lfyXb0Nd(eNJZ1l#Xm^`-MIj& zg>hnX{Oslha#$T6-iG4w0T#m93NM~OCWx7WpOJ8I{N=uQC+apFk!XL_(~J)XD~S1v zE9XSai4VO6w5fZ|i7ZAcoVESM+X^s#fIIQxC(`N5Yu*Abyq~Cmp(IXn8?Nh8StEhK zczPVa0YW;_vQhBkuE}f6F@gDPdYxOkG39Z}S(yT~py7c?Ibo#FuWps$97L!}FL;$E 
zFO1G`SfQD(UZT-A+-AZ!_)r!V?JSW608C7hYzcu|vHLly~; z#aR9AIw2Dy`+>;|A-k{CayWbTA6>7i6V{dio$-)?j^q4ck|(g?cO)5r9`o4|nF8FS z5wqK!F#(f3CSdAaw_hOyNTktgNQSfD{AfiNHIBM;Q31m2j9%;u)hS0hqjM$WkhWq8 zUl>V`cED?@Kp+A)iJP51nD2+a#L9nK;%PH&0C-`4uU!d}Z-+jKDE?&95AZg4G}B5aYF<& zX*l~;;&VW$gJvoivz_O_+nnIm?M*+O!JXl*M0ZOj#?+yOhseVinB*m-^$lsskEnJU zbrxnn_Tp#pcQqrNHOZzfpr{5KAoL8>TD9_E`BL5)#72}n`U?EZUVNZf9Xd2c1mf4$ zlKMG|fn~T}gX3{-ip1)w;3l18b)MiX-&B)D+iR5WIj#n~ybOTkx8LtGD1F+ElumjK$}sLBTc^2=w`=ucAs% zNYe!YqfO(F^^r7o8JhpnARkM{y3+kC{u;|+0yE0cZntz@MIdZ1X=*FFwi4t+1{pil zPC8tGdt%9_f!**tSo5mdjmlT3QI-mWaGG_~8x(#-x7Cg2f8pWB3WF>lxz-J6OStf- zK?XFplivDM?R0$;o$D15YkT@;xjInzz@~{0fNA-b(+tKrgDke3z}<>o8xiT>f$PW9 z%oVtAf{Lnp@?HlzS*niFooLrlm%;WDzkuLZfdXkDM&zT8+`)M5Qa2xJ^9r_1JQ%EH zrl4~8r0mE_K^{xBW!U4_@1)BJ=p>N8@>t9?c(xpDc1usVcw#{64BnboU|Zlf#M@3u zXKpT<`maL~uQfgntOLMCTs>V~kFk8di!PH;u8v}XUG1^t;7DIFNbv4{_k}Q3ZhQ;5oP&bAdzuom5 zqbGCeM=;;`g7^ARP!8ansDospv034>71Y^%6)u$YM|bd zfQ*Vc#JyzIgLLwxGp+;*UJM5wAFM}lz+w#9Y2)MIVhow6P}i0~b&1UWa`}`3AfOej+HdS^8+sNi5p{ z=ibo&<{0dJ<~K$#DCkXoJi zO)eO~vE0T{e>5SyzT+gGJ;;(u#jS8AdO{{2YK8N@fCQGbLfI6B^s+2~Y6?^~ug3Wg zN#;Z$1#UQk?9nK)7h0gb8d*R;^mAZ%Nemh-8t+zaqn}bsUW3YElN?A_`y*G&6Va4o zf?GO}x~;?{i1vpixNlUpH)VnAR^Zr+0mx?7iIUzURtq*jZts@Jyo>&bhHxxmlCDzT z0>?2?-<~!pfxOxdV?WCj6xueDCu1hN^#jR;NKeos_L0FWjl)J!gs9IHNjfSH{f$aO zx%|=~$FxubMYXl50BnmFVIC$+2m04Ij#n{I>%%OYBtn6&heJ)K>P>K4#uP_8eVglC=!yIdx*9OV6d@)wxUuA*9odB_m1N`&5 zE%KIV<#fFrml+>wU5;P?|Ef}OTO>x#3~zz_wtRp%cr_*(0zd5a7Qo3qO}9|j`kwv# zfA@QfcVDj~-;D%)?SL*|Z-ZvUgW-M$nz?C^3yLhj$@7D~$SC+|qbaHA@4Fx9dO|?d z{e^oFF-S#*Bzn~Le*Ox5Q(=kXsD1(URSAxUD}8My9YuKrRz-7E5ZG|#mWKpR?|RxO zZv+~e9>{w%ZY zma(QpTQhx>O$J^3f&jlM$WeR?bxGH63OecQ!%B>?vc=$0O|&{nDJ(K2aV{5bCo6xy zYf=fnCy-Jg#Pdk^=XOa_oXDr*=LfW|5%A-icL-i%_m2y30KsFaqE;Am{IH-354%5m2HGyCJa)Z_aNaX z3cYpTu6xk&g9YQ6Q(Uz1!?iN9QlTBG0HllCl4)58=`NiVL9cX$>72a^t~TU491%h&kD!wCg| Gv;PBn-`%4C diff --git a/tests/integration/assets/variant_study.zip b/tests/integration/assets/variant_study.zip index e19151bb0739022a57dc998cc930947f957fd267..4526fc900f0abdfa80347895cd8880f9fe986eb9 100644 GIT binary patch delta 18265 zcmZ`=d0@@Q_BZqWCPB8Fh%M42(vU?qG(w1kS4%AwyI50HQA=JeuSJPhOKB0!Pfn>N zEv1$a>$OI@Yi-ebZSCP{DMi|9c@)3T%zW>?lS})D=6kj|bLPyMGjqD_aNE#HgT^RJv4O~rBClZL!vM&#$6u0ua3@$UA!jkzbIXBzVoTEBn=7DVz08W+uz z$gcrUprTjJCX|%K{b*VPPV{axKj3`w8j0VhNHq0TE%HlEA{c$|OzBZnFF}fUwVTZ>-lr+MvSnfe_uPJ$y)RCL^ z+(yC@2V?m@3Os5It!-4PQnkvPd9(ZFzf=3e6IH5I830IH6?*?CqZZk|Gh${GebdtW zjijJC7t;<#yfy6EsVh<^Pug16CUxZ0`5tLo_t^fr)Ah5V2bQ!QH0$?&S5z#H+->X9 zxzKA$;_ml5d_A&DsQr%bp9N;p(ds$BMm}Tf-S_6)na=_m17StM2jKUR}4%j=b1!YUs$L z5&fI4TT!~OXyM-A?yF0JYR|kBGBKsT?@J>F-6;yVA5!zw!Td}1=d#MT+&S1Y<@M2L zSAXsCQ4RkIhi@z-z*;w`04+kB8+0r0>JV#29lK#d znV&P8fVZcLkFnVsea@1dpDmn!nRK8;LzpyYpZc7kNJKJWC$)GYP%Cy;gf5hf9y098jY$C|Sr*SV@S;!jD=~WQ*ucys8N-tzjQS;@jCH+;% zhS0gyEJY9}36g&+#A~ZGgh?yHtiUCO({{dS;UILc2R=D|U_L4@@r=f%7*){B4fem!c-d^hi_7 zo{e_`J?%-`nT=8DCQLYWGlcnXVT~zs6DGWBGiyp2TOgur6U(DxMGnN^A`KBkyElSu z!6t=GqcHK8gDr*{#aiY#bqtG~@q%MthsT<}6xzs)a<0CQJNEnrO;Q8;b1Qq9j&D=r zq|ktESPD5#UNO{LxYdCO-pW$!Kx8n=*nu%+EJ^Z*E88v`;&7@;tv3LU6RM8AB7wE7 zo`3jW6FTe1lI%z8vq9D9tNN_Dy|508uV(l0X9KI*V;iwn3{*o21PS(ZlO2E$JJZB7Vs>y{70%=h{O>T2KzS?X|74gg` z|CgU^Vk7ht`Y%>8{YLXl8b6A^R9T3P_K)O6E|nfF*HwCX6?UCt((?uTGqj;6Qi^F~ zc$B-KTQ9lrOlgB z*~iFj!10Xu3qpQoH7I;DEbUmzy>>7^+kwNy;Sp}U zJ~Vx|(Uih=87;}S`)>wsW;dGHul~t~R;L*`kd<+Yg{a|dP zOM+95R}k_S2sln@tMc zW7MZU`(QfHf=2v2-^W(aftj&Lnn5F-}IQyj-FY*V#wRKJ*$hnOK<= zDt`eYDzdFbc1UQT;D`fbgsksr%yk@yl6K5<2--#0*$d46{g0LyD=#R%gfk}UBKt;o z$1#FCq)^UaZ2Y1PW_A173c>Tgi;T;m1C^5%j)s1vqpb1A{Eiv4`hzv14P)S~#kbkp 
zH1IZCMfGm89NPFh)Y!Ln5X@%C2S3o7di#Zt7}Hf+ut|&Yzm_haZmud#86=!l``6YTFl;XAW~U(p)D1r z!u!J+?3Fhju>hWKqw)*~tYRATpbn-L8a7R3%{|;n$|k`yuNkI|7PMneD^Y-X(DW(n zWBY?ithGtI$Fo=Lb0;!yW{=Kcg8&#LY&V-BMi0Y&zdnjJwRaoG3}&x0nl+XTff|1K z10#yc+hZpdjbxAPBZsqei{{o`C8IX^{$)f^MHiM%8S@sGz}Ssd=tY4 zQmyAPFM+iK8)j10EY^fFyWs92RVnTV8n1g0BZq=MO)0S}WZfLdz9uNvLVZrZXEe1} z^kWANF)^iQK~Mbg`dtSv)UA;~J$hX%fseaEP7Kh}j6 zc&L@by7ZyF?VpO76t{zwf$*TL&g^rcV4(eOdlqWa`kBxMt2ltl zdSIy6+p&lC7Hz~z0Mmp@Qz7C}It+C5SCzLF%mvw7zu`1}Zz2{^N(RAk`|y@xx$rK4 zP!2o=?Z~Ao-%x#gT5+6+!l;cR8$=f+^Qu@pzt{`PWGD$ma5{v%vqgtrQ$a**;G$m zHjHf<2T1qXQc_j3Ivw;<+el9L_8XXP>o+jneKXlvaV*-A@-DOz-HK*7m%{t;@2Eo_ z^eQeuQZlbS$8PC|qy$;5FC{HB>lF;)x9zvH_(t4Bm*D&>&v1|=X7SHy%RH=_yg5Ri zP23IRW@|pcneatZ+FCQAU?>lv!f{A)r5t%S zX8&g3AZ^L(YolbQ@>Z?^mbKzp1i7i=ko2d&W~!sCp>r5qn&)b#n9QT2 z*C&mYhfH0KfD4&5g1mg?wn5f?KOI@O( zDC1qziw2R?v^Hm=-GEvy!?`6~3^(g|I`aaT>GXS)`L5Z(1wToyuNYb|3~N}L&0A8~ zFz(9Q`^xk9oikVR&C`G7m$2uy`*@CF+Zlh;*nZ0E(24wc9hd#C#EK^wfD ziZ{>vM`t1^=@OP{NNsG|_z=^NXDO8PVHwHxC1kG~uN!`RtB&((jC)q$WiV%-$IklY zbu*azarKreA7mq^BR3s<-teT<#@y!P7=6dgU_+xC-k3L{(&=U_|3L?jj^@!+9%TB_ z&k>cFo0%H(`d7^i%IuBQ?7_bao9cPN_2DZy1dHSmxq_}19TxIDTE9_&Wdj5033 zdP^spVc4_siC8_h3jt?@fbl`NK*M7YR$vAmRaK)3jr<$^XS-2((yd8ms+vUFBupZ& zT4gbwG?AHeO!6Mi_)_H#mfsNXb53jYy4@<@yl=Iue5H)Lr78)X{`u46I_QBP%kN(${n< z)M)TD(G4y;H$gGy9>=B8>stj5p@VIpbY!B2JNm4N_lj;vG$ig59apVwC0urr#+P~! z3SlTq=0hE4?5~7d-dOQfbTJ2ML!~y+#G&#d)d=Q4=wMWUn$w0oOh3gYHC23tpRzEz zu}3Wv+_f#dWz;iH+|!~;BrR;=l9HXcj>41{_>K$wG@*W!tR$5*b5cJ|%hCYubhMHh zvs0V|VHEQ`)ORD8(p)1z{YkE$&3!iGTWC}$OG&4*kzXceGb~l%GBV8`1?>>tYqwN@ z;&d~T2DXDz)6+qBLQ+*}u~p{RUGO+ysUvyGFl zAx&=sy@W`Go2}ze7}NR8wu-N;r8z)jdBXxrHD-5#;x2LgAI_m72$eLsG$B&^nL29F zA*_dtbF)JbbRJna8I<2$3bu+&?6tUdN*chvbYvN3oe_@w>E5zR{_>Vi)#yEzNM%jI zJi5KcjEY@1eoTk#k2bUT@eUfz)b(IZiU8}#j!tlZvz+&o7(ALE)p4lpHB*8Aco^KJ z`*Ru}3Vm@(+;tsntB+U6q^B|I%uX7$8Bs~nNgapEpe1QsXD3UtQs-L-B;4$vX@a+U zncu;3HR$dNJQc`^{ptb-5K4!rS&KRK>FT5oAk-1{Ii1#Vt3rXxYlsaq@p&hUFUc}v zc|%+zWm5EdG`y=I3SN0ir$XT|RpmO~Q83n-ZkmKn0w)De%GAJ17vkg%TZ7uETzHq@ z(>fK(m8tYj188-&yT<(58q`;%D@^X8fl zjd|*75J=yg*vkp_q})wNsCUO8?Zm~{q>85C`&K7Fc{G9YTH;hhal?X1zqp!Q* z|81%y*!~#7ks^oS2L0TbQQ5Tw=eyy2X;~v=40qkYs39g|?jy1)OY$ZEI3#b zRDJvM7X3A9M;#b7<2X3Ny2o(mH#n?VbAZm*6tB>KZ526-SRHA_DUT5x-v(x>;#D_X zYo%SEftpO|4ec76nS9@4;5WD8{Yy%U8RX8gx3QTb#K{LE%ZqSfcWh)&%GpxMi|)JO ze5ibj7t!_%SfdzKO3Q(MhR#*z5TP%ga%zTg83g7?AhqWx$dKzsD>X*Xxe}89;9evZ|7`fFakeTbp{#C=bLk4$ z0~age0rxE^YVD!?&!tMJ?PmymY6rq`llKH9B0u~}geEjkLMQQG*GMd-UNO+svK{wx zzn?I-__c~@`V~0*Wh0gcY*BKjT{dvl`w=6Jw; zN8mz4!9JRDr{T!Ea04u-EpY9dMuz(pv*TXHa_|=;ldpAscv90F7}s@!4BDp%QSKa|J2D}N|ae~Ah z)KH?OfTnK885FY(s~P`^6E}kY==wmJY#TC`@gG&%WwZRK`!&p>{6)N1%Kn|Q43=PsIbz_xCeZ=y*+4SE!m8`*{0YtZH&6=7R zJSxXx94#kVI59PdHNlny`UCWBnYy#NiE4oPsbK&BXb$r4o-dOpo>kPkc5mYQU z)1Ih#x7yAT0%+_iq~vmkOAB9e;8vQ?$>nDwDDxT~l;LFow5cb`5nV(Ri_@&^gcG0U zrs+TGYU8X-Mr*|RaU696vAtaC?siZGqNM>3nSQisEA~;^V7OOEuq7eT2oP`Gvdu%< zIFPc6K=7GPfUXheM#HhP3HVp*X#(&PZoO0Bu>scH4CkH_su8TX#^R*2rEdYMX_$uc z3o?*rSJwR7(TX5%qr=cH%~M`>;sTrvD!S$uEP~u37#fkmQaFE%#wUu`w8GD*r(QtJ zPf(0Gz8OBSWV}W-x+BUnBECqipU=@ycRIr80|OzzEvDVvr>Kf^IG#x?qvI~H-wC$i z_N&=DoZr=Po7;0=O8l7DlLO1Zw()?*ixPTc(N7b~HLUhv$kr+K|hTZ z*IdyVHLCg_;%N;nG+s1h2l7*ye!k!W%g&k}{FnKWOd5~)@LO zbDUOk?)Z>%ceD$2;zM*u`W~38d=!FQ`d&!7*IJ{_D8YL*T8hx&;UvgTcPBuTkxuZ1 z8v$nHV8)%4y$$D+sz~8zMgp&x?ZSO#Fnh;|Z{mXTgm!Ak(tYOh6txdpe*K)L1bUUM z$42>D`{MHo=jc@G#1FV2vPY>C59#Kl@ukh(&CXgxyl3Iix%3QW_Ch9Q8{vqEgEJK$ zaDg;#IG(%*zl3mHIt~FXF&oR}*(FbJntp2Ozo+$ zj9Y^90OC-hABP-D9y!%}X#`zIV#^-y3vn|nFy>i<64ST`^*@hWFEozkmv!8XL6Diz 
zANl8jff(=A-b$S0^Dl?z%k}9t5O#gw!huCpesI!+@=<*h3mUCP@w3*4#l%LKx;lv1(K0UDp9t*eoy-D;fxon10B3+4U; zH9nC+DEr>(0sH=2$D!TJQt_?>EV{T*$@ZX>9uBt}+zV?qXpkkd7hKh~l)t*bHR#q% z)x4#vIC-#>uZBn;8mslZq=V5lhCUUsMWu?phiEJ^ZE!8`VF&IyTn^2nvXxA_oZKYA zJSp2^76dY5a2a*gP)y{MPT*)aYea94G<|GwT6K2E%ndI%MOm(B8xB{@9OeYqAdxX> zs+a3Hqzt-e_8D&Ri9SU~9~(-eV&`Ii2lIW85ul+BYbRopRDJl1?h+Q|IV}DJ{gX$y zlgoSp&2TcGh~;Nqawia}gf^X*M^+|CS3%vCyIr&h9IbQ_?%}H4C?!D>$b`bR95P(p z&Pe`cJMqc+{Yjb~>#Egrvg@LNuB(0C{H2tLd8QClX1t$zG&7AQytlkp4-wycg2o4&Ki#iq zMhNMle9lBCuBK>fj1)Zu(fpPU?zk8azIde!q4Bjb)X+&93tAt$@WVO;O^-Tt>&Y55 zdLgX=*6HB9MU_~B|K((NbWcMaQ;>zGc-yvmdYP_DqMS2%xnkp`Asqw*|T5Nw7~@qOHu(J^RJob=E&DC0Aci<`WeMJ!7mDQwG3vslM{S&XLE=w-e3HUukE|(=eKG2xab&GaQ zF->VizttKJEx2i7hT%MYod$mOF2qTLZPdZ&!A()#P;?j5E&fPjK`(Aom#Zm*-P+_N z2qX05S}P%Km0je-bx_)^yn{s;x7i5}qlJGVq0fFEhn)0bgn&?fWSb(uRt}=BzhWy3 z*H&A#$#y5ME|s)b<3;lo7P#yWqq9&yoPW5(N!WpM{1Ig3KK##a1up&Fs7r%?hwQ!& z(WREGogq7BX6(_3_O^qA+Gb&=FZ>-Ad3LW8*PUj(X$HzI_wN$tXhF_);Uv)}9L#@n z!!=RT9{dX2ql4~zU4Mf^=2c(@13y!^!nufO=p(jXb`hpS`2h{*=qGNaX882yE;gXh z$BHm_Uxc}Zl`+-nPz}@xbG#tWDsvIS?wTTvM9~)xHS{>hiz8XE2hkm8liv7&te7@@= zn5NFdktdg1?q~kBCV#=3Xya~G9p~NzH}EssuB>q3{Ap)3RM%Y_vhQK1IDa}khsC?8 z`>7k=n>yN*iSbh+XMk*ZM3d6o+w9HP>5v`XW_Pu@!j5W$s_j~6pXUa~4{61=@ja$d zJNm9IrmCCqc|pF+ zu3Qz2r%wpxdQ>?=%$2Wg{YEfEhz{>4%KaUIAC1DnJm^~| zSpLqj2AX)$C|ZEY?9)+>o^WKOZ{dPo`VOwX%?q$U0{^247+-*!8(PJ|d`bhOTRcwe z-w@ih1S}m+YdDlF@LM>m+q<^%Lf6e<-)VaPO|zF!$fTt$Dpf1O&InvI;n~2cHGYyd z6SLZOh8E8^vnhNQKAN3XbQ$y2wbH6dyl;W=n@YTF3;Cc%vD*2ZQRAHAlTQbKtKpmH z#AF&$b}Ellc6C$p7mEP>>S$OLAyKM02gkBSJREyvFxMaqU>0)+Cq4;&1Qwea$x6Y4=I96aWUt`b7s zhxGoth>7^&fK@&CmxM%#-wgENm*qzz@sk0xj*0Lkiv~R&39c!;RQ|t__#Htvo=}Ar z+wpLZ-+179@h)~beo)YjFLwj`h+h_&W@H39!i!K_2ji(HQ?p_2w2Y(E>Wb`HP^S~c6jQo%Bdneb= z0WvQb!Nyaj0N0_}7mQ}&mjL0$A`gRRo;SL?I%JL7)fCZ7+kqd> zL(qlOipKW>z_PhkTsDn?XA~GeiIDO=DG@(^Fbyj!sj~4bnzgI^KiIlI-T(jq delta 17122 zcmZu&30&31_UF$1fr=~_1VunZ16c$_1rbp}&0XYbuE|KFM4@7~{lug|CAceXim=FFKhXU@Iz(0_h$ zlag#Ydd5o2haIDfR%?I98h7h!R=RT9I?EBMS*Kfgg~gv{p0Q?;Z8r0#;wkvg?0s4? 
zw=plr+(y>1-}2MZ{&bmH{mHRki=v|0tRZcg&TIm9pB`GQS9mMSM4q4TLHVt<){=mS zY|G;*8*Ti<{4lDH(zNnZW!oB-zpyRV!kcePB73wJfe{*(r)<2~kXI~7qRMEkEw9aQ z^x5iUPrhzK5?{6;u{?ZvO~dj%3!ZF5Z%1hXd}MY2&)?`t|BTWSdG3Y-JmS3|zJ9_} z)IU~MGArCQM2wfe&TKr z1v!$rJU_puC*NbSQSLN$lNK!10_f#0St!|wHJ}SG88#AwCF-z@D&|g(g-%5LK}cR* z$;LUntJp}53YTb2$+n-}rHYeSp4+?F7%Dr=G{?~0tg}YVR|1$_!R}Cl5*-~!@r$%5 zp*mH+$21E40E@YQI}6hs8_HQj3;l6~bVNwa?jp^SPp5!}MOK{xW%C1;x{1%q#!kiHEOQ#zv zKn<02mQA??L%;;?7J;5F<|;9b9aYQOO{+_$o{g^ zx22Y_ew2W@(~q;XIMb3))r*=}Ig!1VYAxttu@<81>*%zId0VLYTsFzcU}XzWusEtX z&Nd1^Ore5fFxG&TPQyxvaT@k9fpw7YS1@+F&N*>m*q40e`|JqHWZzlDK6ijc|BK(w3*t3#9(47l zwY5;A-23qt7%$2R34~k^zGp=H@^!fx7KGhWJ~daTE)Lf` z-ooF8I6iF5Bp#n5aNYR+F#?`KS~J8(LtCn9h8$Qb@tn}NJjn>YDqk#F!={F@zJ%)g0R#QQ|OK?`nK1Ng+~NIHJY z3g>C#%WaVXI8Bf7S*g8f%Y6VJrYz)PaqDSY7_T5#=<9D^U>n3o*M zPsGO>T+hkDa{p8Yo0r^%SEu&{`ynv|W&8%D$9)CfaZnejGx9-ynrfaHR zUFnlwVR(H;y?Jg%XG6KJ{9M9tL&44~?qe^UNbe~Q3CEnlY=wo2#<56B%tPF*DnMGG zhL|9-k70haU5mJ4|CZ(nv~0yQuaueM%d{Ms4$yF zN`G<&D=Dc7&AG@r3oaWK<|FtV`xxiHy+8|b$&8`&d=XcWX3E)u!rO5M7G{ScPFspbZtClfTbDkxb&yFqjfYe3@h36Hb){V|#&dC!bQy)6fPpNY;T5zQZ&x`~%a6 zN|SX(kc#Ki+Cugo)lS7!atm1z-JFX5%$ka2u9^%OW)e&n3K6#^79p*X9#;?HIh7UC zjY2I($NChpyp_4ISy&eqv@(=6a(Mc&cAm63Q;U=o2;Tz!I&BJJQz(q+I(LFe_8jv_B1CJxhrDfbdK*mqvBJj(YZ_w?w%-|$T2 zUo@`dlRXls@E2PkRvLB+3cO#`0+EuJWeLOWu?m-M(c@Ez&bHKGBk76xeo_CFy(L z{!r}rrj{TA(w{0XA$cvf;NoOI%cAMRFW3ot&tPYd`31Wx{tVa-|I9{H?9a%)Wjt70 zV{9xbbJj(+Md3dah=iu*Blrl$K^`smm;U6GbS#?%lIW2MGjc>C1 zilix5g(UYYbtOr*f?!J%VMT-?$aLVk1z$S&qcI1eNTS1jip|g*n82S-P*~aUt#*=dqGRxhV*y z1vcILqv>1 zCcM&{+V#XHOALV^OtTZU>0#vl>k2I+2hp&eIE~LtM8;TLgxX2zr!seH4ovT?%c62Qp6+28)23wRLP~?v9e|9TBIp7iNKa1 z)Ec3P2cho&u9+4mVi&GkD}{eko^oLLpz@^sR;zQK>p|F-7aY(i%NIxa{XY6Wp-JB2 zs^g1PAg@0T3%Ppo^QI3@#)}v0wuWw(hje;k$A%+AzIJnSYiZ@}(t@exhP4Tuy>4yVc=#}H zw+6P$1D<@?mABFY=d3x|qw#Brp$mTctNX!(w85R?(`MYbez5fQ=k50vygFrP>ii3p zF@3Hl9XMLh&MRzo+}M#3ccN+@s~maqVv~)XdoMe4#pl*mzc==NRJ!=tr`?jLoa+|7 zVaV{s~?$d&( z+g>Dp=l5#C)<2z~FWs%}DC1W|y5veN%=)UkK{~Zk>m~=GDT4li6aM~QtrM*-f;;c4 z)O@T(4Gj9rm4FZ0r-fU;)j^Z3i47&L^~y@Eqt0@5H&|Bv=tdvy2X)aoYcNUBb%6fd zt>szMPPG|Xwf;V4?A{-zCCYkN@eG) z16a$mv^+)&6*bE^vPElQ>;(x4=b76gC?iqx;b|MasNrLp4?AUGPL;)qx!Wi^Rtsjk zE(r;$;DC&5Pb^?UZ8kavFF0s~J}WdN_R(p@p-ET1iV)(b^Mi z-k&;SyA6mH=V0J#k3p%=?+Iu)%ApZddc*2TEusKz`IinIyu7Wx7{?<3wQ6L^(3psrIfzyfK?8U(9%Q+HQ4gN1!(LlqP8a~J1q1i&YNA4_QDVI2$#UX!d=)-Kt#tsaFd>iA z!xbp%N|UyUmxR0^-yMb5XtF=8`Ot?!7=CUYj0gW^w_&?}ZK1#&6H9a3YIbH%a0#M2 zZ4o)EJXM7%UFbLp|J~|C_giC(kakWcPfC3Z3-usH2b7NQEO!rWEyB7-rwV9OQIcv% zv!=1ux(Kgn5b>Id`(YY_tSuh?B6b9e?4t8kzl2qQfx_5)15&yemlafn<0xZ2OtEej zsBfsav?Z|V-%Bt+PMR)6vHk7Hq*;Fms94oK>VN1bddi18%>GJvnTb$70SgG$aPERV0u#cbA4R6 z08y1rrqW_0Z_8f6z~@z5+rX733+5j5709;==Aq%vjLd$UGKGH=KEGpu1(y`2QluNB(vdu&M#C(ITZ+oLoK`e@!vv4iEhr~?`6-1O z6ZNKtui}`koD1cD8|%Uaih??d;|sKCRxnNhqp;r5wdBEU-FO9e^Hr>cl=R^Q1$+EpPMRS=N&^x|dU(BJoa0VH{&B0P$aZ#noAy&SE5#BeU z&1v8(@WoI5;i79!$6qmGrXDFy7wfpHgUpxKd@11~aNR~fEr1(R{c~rA@I!oc}E=kR) z@;DSTSx%d{Ub;~b*TdM5xjIYPJMeHkWEdX4%LGqgG4otBEy=zL8Gwv~c%txO8%-Rp z5XQ5{0Omj6#UjB2#e;?qTcm>59(B$M?;je?;W&zZR|)XcktR-`FgA68F3$cnVodO1 zOswb~Y?D(ec>fAS+kPu>di8ljp$0fZ(f11z*HyQUIe5E;F1}#e_coF>sq`ij97tWb zmc^PZauEj7&BHLi^s+yeLe<3v4$cuk)`L*BU@=x&dcl8G9G+8DFL-jXE>rTYIpp+$ zb1Hb?S5AvbFX;EC!m{%#rx)xu!Odx^(7^P9*d;C&=>?&*cpBDay#W@_IP}3qYb;ei zkLaW=)u{mv6c0Ls*c25~G83*)^#t}Yo@O+vnSlkYF$qv}8I48izsx118P(3jmaLiq z_ugxQ1I5Engiak<;-U_u)z53g*gGl&k3$BbWw}DwX$FhckMaj%I?o%pnisVKcGmo8 zN>g9LDGrxXRnF%wNq*ve$<7YfgP`*9BKVNYsC+$FD16?6Z?g)S{{oJZThrhYk)#k_ zo{lxlnGVA&QNe?sLoC@l9W{%!MHuCVNRu0`l6=m0tXL{&gheS(plcf;4O;T9Bzvo` zH=op7&vy8#-fC#PVNx9FjacCa`yYKz7hXEhPdtYu&@4Zwnz-5p#k&|19+6}(^F@d} 
zqF`2K#_2)^uW^zmue4!G<@z^e*tEV>a~CBe$(y;>N!El14T{xyzjXoq@R}AO`8%z1 z68ec(wN8qRY6X=qP(`6I#;{V8L6*d4d!gtnr&6JyCVAJ9^~8D?7q|77bHR(NCp_l| z9RpiZWFU5W+WV4He6I@BMzgLTIKO%DFvF6kOxSjNw7eYPO)r2p`(QtG`Ow7>GBnY+ zv;-n^YhNdG`=?3Pd`O~hpzk+0S;fm=2deeQ0;O$KfgU{E?z{;-D)dx6+ORBXdgoL( z8It_@);>m(oE?ajPW#9}bk(mkpEbs~T|ahG)4j&}whv>cbfErXx1uQyj|?vuG3gUY zE5Lr@NpcWdr9wVwfoo-{2lB_VAW%2htWe{PvZKDTGfZ$38XSVKep-d3hvIf)3hDQM zE+LQW{5wsEA6;k;B|^5iXyEd}b(C1B;P}0fG5QMJRiVR1L{L!(oIGc%DThaa*47;R z>4*xg7$GGXmhQaG#ego1R;UP_$;Am4f@1*{8y~j19)1LCR926JYFk0yL@%twOcf`d zS^0Z8Qd%@SQxBD*Sg8M}ib(x+HcFqRL&4ykHd>!$=w~K9kIISHXSqm)R*Z=k5{y~? zp`gUmZWhZaH${jC-4X0-74mB{t+Rf7$TO(ZcAYx07Dd=Gx2$nAv(_5MR+->Hyz)ls zK6&ppP_1^jI0n#bx2?hKV-pg{{~f6l^EQzIw5ZnV$=dI9F$7ZPZEG^!`VY7&3~=}z zYcD$SDN@Vn0-Ww}QTy=*(J}hS*oE$VV3e;e@sLV0C(59~gL4?m+GVnfm*)<$%EC_R zU<7q<`sfboA4wIG4uGZ<{;M^SydPSTXxFfg^9T8dZd&SXcqhudQ{>i#=sQ?lF1 z7|ly!@wOWmNo5VRSW5!^rx&UZ5Xett*zlGeNu~F!v6d}5Jh6Qy*{yKdfqR_-n)3WK zAG~i%gW<9BZ%p59pN_5V)KAPj)ExMbfvJhp2d-|b56qsfGzDU0@f4m+W%nTL7Xy>n zKALX)3Bwmy5ouoAZ_+~nDrKOTgQrwE2Y-z4c%EEu>JJRGp#cR7#;OJox%zWcq?mkO z4}J114q7?zhOb-$<1JsVNGzLt$Tb{oy=(2f(p?L&9CpIQXkL`i zNK2xkKOpDyU&tO7=;)RAtvSNlUbe1vPMnCZP44_OPdcn&H@aN!Uj`Pg$E#AK>p8$T zN1VLTqH~}N%?Z&&--Jfh4RGd7JnBSC#n4L-qnqNIfk=jZz*2b{M3wSM;SZ73w;TO0)tQJ5}|--Qb^U^@P+}^ z*TNGW3vw)*uEqLlmll-w5Ik^%V7BsG9SRQ=jSQ($1M+gmw(WS*iPEQ^n}MX@o;6fo zmQM|=u)(C$B*MP&ihyg-%6$8&IEO@h% zJtnG(i&X-2h;$V5!;M+k^R9|JJ_dyfY07_`Q)Hrdqzk1_#`4Jg%ywSKp;rWjn$vg# zzBUqBvs~yICT;?|YJN1Kk>_xst!KNu3yP$d&!T6vp6!mP1p9q0$bV*IDBCFbR920` z9OsJ%U-X@z%T4GhlkuMo0(iRYJ&Dq5LPsZH+I@f0nVmSiV^72+yHA4P+mGOK{>EKf z8XR2#8twl8QEK)@MMB~fT)5?uqd~+*6MKT-SCs&*FYbhgN>4%qi{Txa6QRgEzbIlP zm3MEH_~6vWRA6#n%O!u7Z<2O&pwMi@X}R93>t2_6Kn_xV}ToRO~Il2CQ0$-U!Az8;V9_BX)P-L(QMRp zgXL)rK4N|}q2o6dA2IuXd|2MUT`W!L_LI00%Zb2(hKkxvg&_Tk-i6av(eso_5Hk*Y z+;o)Lw_J1Zr|x#>DT{~FQ7)P|)Pcl%**ha(^+mT8mYNaBO`F#$kb;pogk+V`^oOM? zf#@EpYwKXv@s1+F*-0e5Ae>o$b>Tcjr%(ghFvY27?Lb(e+Y~IuQ}w94Ma$6>?BxFf z%T6R|$55TQ=Wlg~kj$EBRZ^)w``=XPWiqJHx8%cG|F^y@G&XghmPNR8FMkAw-lk~X zYvnB5ey__sRM)Sl9@v|v79y0%Wz0vhm(}P5q8p0P4W&QZQB5Xtm-srlOWIvI00{kP zil07qEVEi@`9K7hIiYyhfqTWWX$GWiCX9H&_}DfGADK+2O%Bm%ZImO-IY0;U$NT^PFgi?uV~ z<*C+x&M0W~eRU;!Q*l&_SJKOmnVjeVGlUNL8A2bAFi}g;4<^*FC)h30BtgrVD!6eQ zC(4U=_h>DZMHiW>)&!G;+Fi6J{*M8oqMTB?3Y*HOU0<}axa<(187eQYfyyN&hK`{z z?SDwjig7U$de^$r{;Sq-mL01@(V)f$`$y^FdGj?~;~XZ2u3SUggal*L79+Szd~v}k(k#K-%ovp=h#7hbh?qS{*6cZ^M?c_-c!(fF!0Lp)7|vkeI* z^w=eEzA79C?Eo%stGJBY;=@Z&|9}Eby9^29oyUCoJ(1{SYRqq0T2u92Jhw+%RUP8m zyGW?;0`8~iUA4=s%yLXD9nx?A{_HEj1pGGdl`gYudI3wVchy zM~k%@o(4qaZEli|F8CRn4Ta-iO8Wrdt0vfkwmpTgkB3p4XznwP@$6(5ANp7?A(<`O zjIDn{1@Fm2W^#P97PWFSLVuS|3P01_QWd(ApB`0du}Xl9 z(`b&fZNgN;y6DVBA0k2{iL>dJd(8wx-IPZy_nS&^AqAOn!BS0I`2fELk|vyx=8}sZ z+$KnU(5e`s8>`Fvh^%d^aT@i5aeXYH;?RpcMP=BQ((-n^U>oEhl+_E0E%fQJd zNpw>VU?CYgH6ZfB?nQgH!=DbAV0XG*<<$FlDcpB(4;PC&&FNFe#=_~QZpG36!#YqN zP$^}ix$h5HJ7XPIcv2rl3OeIDC|-G>ufRo%cFj0Kt7d@GQr2_F22 zdxZ2#xwHBThJNYb@st9Yv6f0=-zo6?w`?$PS~aYzM$M@~MXdHhQoF<#SmO zA5b75ZCA{&M(Sfp=q?>98d$Sfna-UEh5glZ)tm$}TLY=X+W=wtA2@XKR zsVp?qQ}KpMAgc}aRD5!RAy`O2wE>mW_L$^CUyF?fypO}@8jbi&Ij#pH?}?t! 
z;&&C?{4<ee1sKdFFu(SbNIQY>5av_d?17~)i&DHtb2%%>(5FWj#>LF{%H z3SAmv>E(suoB^)wqxI8|inJY)aHS6mL`I4md~qu5C__7*!o!#sUX~3)&5nCFw{CUnB N assert res.status_code == 200 -def test_area_management(client: TestClient, admin_access_token: str, study_id: str) -> None: +def test_area_management(client: TestClient, admin_access_token: str) -> None: admin_headers = {"Authorization": f"Bearer {admin_access_token}"} - created = client.post("/v1/studies?name=foo", headers=admin_headers) + created = client.post("/v1/studies", headers=admin_headers, params={"name": "foo", "version": 870}) study_id = created.json() res_areas = client.get(f"/v1/studies/{study_id}/areas", headers=admin_headers) assert res_areas.json() == [ @@ -970,7 +970,6 @@ def test_area_management(client: TestClient, admin_access_token: str, study_id: "solarPv": True, "solarRooft": True, "spilEnrg": True, - "stsCashflowByCluster": True, "stsInjByPlant": True, "stsLvlByPlant": True, "stsWithdrawalByPlant": True, @@ -1138,7 +1137,6 @@ def test_area_management(client: TestClient, admin_access_token: str, study_id: "solarPv": True, "solarRooft": True, "spilEnrg": True, - "stsCashflowByCluster": True, "stsInjByPlant": True, "stsLvlByPlant": True, "stsWithdrawalByPlant": True, @@ -1713,11 +1711,13 @@ def test_area_management(client: TestClient, admin_access_token: str, study_id: "enabled": True, "type": BindingConstraintFrequency.HOURLY.value, "operator": BindingConstraintOperator.LESS.value, + "group": "default", }, "binding constraint 2": { "enabled": True, "type": BindingConstraintFrequency.HOURLY.value, "operator": BindingConstraintOperator.LESS.value, + "group": "default", }, } @@ -1752,11 +1752,13 @@ def test_area_management(client: TestClient, admin_access_token: str, study_id: "enabled": False, "type": BindingConstraintFrequency.HOURLY.value, "operator": BindingConstraintOperator.BOTH.value, + "group": "default", }, "binding constraint 2": { "enabled": True, "type": BindingConstraintFrequency.WEEKLY.value, "operator": BindingConstraintOperator.EQUAL.value, + "group": "default", }, } diff --git a/tests/integration/test_integration_variantmanager_tool.py b/tests/integration/test_integration_variantmanager_tool.py index f381cab3c9..4b27c84848 100644 --- a/tests/integration/test_integration_variantmanager_tool.py +++ b/tests/integration/test_integration_variantmanager_tool.py @@ -21,8 +21,7 @@ generate_study, parse_commands, ) - -test_dir: Path = Path(__file__).parent +from tests.integration.assets import ASSETS_DIR def generate_csv_string(array: npt.NDArray[np.float64]) -> str: @@ -49,27 +48,26 @@ def generate_study_with_server( f"/v1/studies/{base_study_id}/variants?name={urllib.parse.quote_plus(name)}", headers={"Authorization": f'Bearer {admin_credentials["access_token"]}'}, ) + assert res.status_code == 200, res.json() variant_id = res.json() - assert res.status_code == 200 generator = RemoteVariantGenerator(variant_id, session=client, token=admin_credentials["access_token"]) return generator.apply_commands(commands, matrices_dir), variant_id def test_variant_manager(app: FastAPI, tmp_path: str) -> None: client = TestClient(app, raise_server_exceptions=False) - commands = parse_commands(test_dir / "assets" / "commands1.json") - matrix_dir = Path(tmp_path) / "empty_matrix_store" + commands = parse_commands(ASSETS_DIR / "commands1.json") + matrix_dir = tmp_path / "empty_matrix_store" matrix_dir.mkdir(parents=True, exist_ok=True) res, study_id = generate_study_with_server(client, "test", "720", commands, matrix_dir) assert res is not None and 
res.success def test_parse_commands(tmp_path: str, app: FastAPI) -> None: - base_dir = test_dir / "assets" - export_path = Path(tmp_path) / "commands" + export_path = tmp_path / "commands" study = "base_study" - study_path = Path(tmp_path) / study - with ZipFile(base_dir / "base_study.zip") as zip_output: + study_path = tmp_path / study + with ZipFile(ASSETS_DIR / "base_study.zip") as zip_output: zip_output.extractall(path=tmp_path) output_dir = Path(export_path) / study study_info = IniReader().read(study_path / "study.antares") @@ -83,7 +81,7 @@ def test_parse_commands(tmp_path: str, app: FastAPI) -> None: ) res, study_id = generate_study_with_server(client, name, version, commands, output_dir / MATRIX_STORE_DIR) assert res is not None and res.success - generated_study_path = Path(tmp_path) / "internal_workspace" / study_id / "snapshot" + generated_study_path = tmp_path / "internal_workspace" / study_id / "snapshot" assert generated_study_path.exists() and generated_study_path.is_dir() single_column_empty_items = [ @@ -188,20 +186,19 @@ def test_parse_commands(tmp_path: str, app: FastAPI) -> None: def test_diff_local(tmp_path: Path) -> None: - base_dir = test_dir / "assets" - export_path = Path(tmp_path) / "generation_result" + export_path = tmp_path / "generation_result" base_study = "base_study" variant_study = "variant_study" output_study_commands = export_path / "output_study_commands" - output_study_path = Path(tmp_path) / base_study + output_study_path = tmp_path / base_study base_study_commands = export_path / base_study variant_study_commands = export_path / variant_study - variant_study_path = Path(tmp_path) / variant_study + variant_study_path = tmp_path / variant_study for study in [base_study, variant_study]: - with ZipFile(base_dir / f"{study}.zip") as zip_output: + with ZipFile(ASSETS_DIR / f"{study}.zip") as zip_output: zip_output.extractall(path=tmp_path) - extract_commands(Path(tmp_path) / study, export_path / study) + extract_commands(tmp_path / study, export_path / study) generate_study(base_study_commands, None, str(export_path / "base_generated")) generate_study( diff --git a/tests/study/business/test_all_optional_metaclass.py b/tests/study/business/test_all_optional_metaclass.py index 2e83a4e433..1c379f6460 100644 --- a/tests/study/business/test_all_optional_metaclass.py +++ b/tests/study/business/test_all_optional_metaclass.py @@ -3,7 +3,7 @@ import pytest from pydantic import BaseModel, Field, ValidationError -from antarest.study.business.utils import AllOptionalMetaclass +from antarest.study.business.all_optional_meta import AllOptionalMetaclass # ============================================== # Classic way to use default and optional values diff --git a/tests/variantstudy/model/command/test_manage_binding_constraints.py b/tests/variantstudy/model/command/test_manage_binding_constraints.py index fc124bdb40..2ca4015808 100644 --- a/tests/variantstudy/model/command/test_manage_binding_constraints.py +++ b/tests/variantstudy/model/command/test_manage_binding_constraints.py @@ -79,7 +79,8 @@ def test_manage_binding_constraint(empty_study: FileStudy, command_context: Comm cfg_path = study_path / "input/bindingconstraints/bindingconstraints.ini" bd_config = IniReader().read(cfg_path) - assert bd_config.get("0") == { + + expected_bd_1 = { "name": "BD 1", "id": "bd 1", "enabled": True, @@ -88,7 +89,7 @@ def test_manage_binding_constraint(empty_study: FileStudy, command_context: Comm "operator": "less", "type": "hourly", } - assert bd_config.get("1") == { + expected_bd_2 
= { "name": "BD 2", "id": "bd 2", "enabled": False, @@ -97,6 +98,17 @@ def test_manage_binding_constraint(empty_study: FileStudy, command_context: Comm "operator": "both", "type": "daily", } + if empty_study.config.version >= 830: + expected_bd_1["filter-year-by-year"] = "" + expected_bd_1["filter-synthesis"] = "" + expected_bd_2["filter-year-by-year"] = "" + expected_bd_2["filter-synthesis"] = "" + if empty_study.config.version >= 870: + expected_bd_1["group"] = "default" + expected_bd_2["group"] = "default" + + assert bd_config.get("0") == expected_bd_1 + assert bd_config.get("1") == expected_bd_2 if empty_study.config.version < 870: weekly_values = default_bc_weekly_daily.tolist() @@ -123,15 +135,21 @@ def test_manage_binding_constraint(empty_study: FileStudy, command_context: Comm res = bind_update.apply(empty_study) assert res.status bd_config = IniReader().read(cfg_path) - assert bd_config.get("0") == { + expected_bd_1 = { "name": "BD 1", "id": "bd 1", "enabled": False, + "comments": "Hello", # comments are not updated "area1%area2": "800.0%30", - "comments": "", "operator": "both", "type": "weekly", } + if empty_study.config.version >= 830: + expected_bd_1["filter-year-by-year"] = "" + expected_bd_1["filter-synthesis"] = "" + if empty_study.config.version >= 870: + expected_bd_1["group"] = "default" + assert bd_config.get("0") == expected_bd_1 remove_bind = RemoveBindingConstraint(id="bd 1", command_context=command_context) res3 = remove_bind.apply(empty_study) @@ -148,7 +166,7 @@ def test_manage_binding_constraint(empty_study: FileStudy, command_context: Comm bd_config = IniReader().read(cfg_path) assert len(bd_config) == 1 - assert bd_config.get("0") == { + expected_bd_2 = { "name": "BD 2", "id": "bd 2", "enabled": False, @@ -157,6 +175,12 @@ def test_manage_binding_constraint(empty_study: FileStudy, command_context: Comm "operator": "both", "type": "daily", } + if empty_study.config.version >= 830: + expected_bd_2["filter-year-by-year"] = "" + expected_bd_2["filter-synthesis"] = "" + if empty_study.config.version >= 870: + expected_bd_2["group"] = "default" + assert bd_config.get("0") == expected_bd_2 def test_match(command_context: CommandContext): @@ -341,7 +365,6 @@ def test_revert(command_context: CommandContext): operator=BindingConstraintOperator.EQUAL, coeffs={"a": [0.3]}, values=hourly_matrix_id, - comments="", command_context=command_context, ) ] diff --git a/tests/variantstudy/test_command_factory.py b/tests/variantstudy/test_command_factory.py index 09f45d30f3..5f9af93ee2 100644 --- a/tests/variantstudy/test_command_factory.py +++ b/tests/variantstudy/test_command_factory.py @@ -125,17 +125,7 @@ def setup_class(self): ), CommandDTO( action=CommandName.CREATE_BINDING_CONSTRAINT.value, - args={ - "name": "name", - "enabled": True, - "time_step": "hourly", - "operator": "equal", - "coeffs": {}, - "values": "values", - "comments": "", - "filter_synthesis": "", - "filter_year_by_year": "", - }, + args={"name": "name"}, ), CommandDTO( action=CommandName.CREATE_BINDING_CONSTRAINT.value, @@ -145,11 +135,8 @@ def setup_class(self): "enabled": True, "time_step": "hourly", "operator": "equal", - "coeffs": {}, "values": "values", - "comments": "", - "filter_synthesis": "", - "filter_year_by_year": "", + "group": "group_1", }, ], ), @@ -160,11 +147,7 @@ def setup_class(self): "enabled": True, "time_step": "hourly", "operator": "equal", - "coeffs": {}, "values": "values", - "comments": "", - "filter_synthesis": "", - "filter_year_by_year": "", }, ), CommandDTO( @@ -175,10 +158,6 @@ 
def setup_class(self): "enabled": True, "time_step": "hourly", "operator": "equal", - "coeffs": {}, - "comments": "", - "filter_synthesis": "", - "filter_year_by_year": "", } ], ), From 2363b76eff9977c6627b519d9370e32330870101 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Thu, 21 Mar 2024 15:06:49 +0100 Subject: [PATCH 032/147] refactor(i18n-ui): remove initialization function wrapper --- webapp/src/i18n.ts | 61 ++++++++++++++++++++++---------------------- webapp/src/index.tsx | 3 --- 2 files changed, 31 insertions(+), 33 deletions(-) diff --git a/webapp/src/i18n.ts b/webapp/src/i18n.ts index d1d95a0574..980cffbf89 100644 --- a/webapp/src/i18n.ts +++ b/webapp/src/i18n.ts @@ -2,34 +2,35 @@ import i18n from "i18next"; import Backend from "i18next-http-backend"; import LanguageDetector from "i18next-browser-languagedetector"; import { initReactI18next } from "react-i18next"; +import { version } from "../package.json"; -export default function i18nInit(version = "unknown") { - i18n - // load translation using xhr -> see /public/locales - // learn more: https://github.com/i18next/i18next-xhr-backend - .use(Backend) - // detect user language - // learn more: https://github.com/i18next/i18next-browser-languageDetector - .use(LanguageDetector) - // pass the i18n instance to react-i18next. - .use(initReactI18next) - // init i18next - // for all options read: https://www.i18next.com/overview/configuration-options - .init({ - fallbackLng: "en", - backend: { - loadPath: `${ - import.meta.env.BASE_URL - }locales/{{lng}}/{{ns}}.json?v=${version}`, - }, - react: { - useSuspense: false, - }, - interpolation: { - escapeValue: false, // not needed for react as it escapes by default - }, - ns: ["main"], - defaultNS: "main", - returnNull: false, - }); -} +i18n + // load translation using xhr -> see /public/locales + // learn more: https://github.com/i18next/i18next-xhr-backend + .use(Backend) + // detect user language + // learn more: https://github.com/i18next/i18next-browser-languageDetector + .use(LanguageDetector) + // pass the i18n instance to react-i18next. 
+ .use(initReactI18next) + // init i18next + // for all options read: https://www.i18next.com/overview/configuration-options + .init({ + fallbackLng: "en", + backend: { + loadPath: `${ + import.meta.env.BASE_URL + }locales/{{lng}}/{{ns}}.json?v=${version}`, + }, + react: { + useSuspense: false, + }, + interpolation: { + escapeValue: false, // not needed for react as it escapes by default + }, + ns: ["main"], + defaultNS: "main", + returnNull: false, + }); + +export default i18n; diff --git a/webapp/src/index.tsx b/webapp/src/index.tsx index 80dec85813..2c6792f7a8 100644 --- a/webapp/src/index.tsx +++ b/webapp/src/index.tsx @@ -1,7 +1,6 @@ import { createRoot } from "react-dom/client"; import { Provider } from "react-redux"; import { StyledEngineProvider } from "@mui/material"; -import i18nInit from "./i18n"; import "./index.css"; import App from "./components/App"; import { Config, initConfig } from "./services/config"; @@ -15,8 +14,6 @@ initConfig((config: Config) => { window.location.reload(); } - i18nInit(config.version.gitcommit); - const container = document.getElementById("root") as HTMLElement; const root = createRoot(container); From a7cb7d9dcad7d25e4383a8e4508363248db889a1 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Thu, 21 Mar 2024 15:07:22 +0100 Subject: [PATCH 033/147] feat(utils-ui): create i18nUtils --- webapp/src/utils/i18nUtils.ts | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 webapp/src/utils/i18nUtils.ts diff --git a/webapp/src/utils/i18nUtils.ts b/webapp/src/utils/i18nUtils.ts new file mode 100644 index 0000000000..c613deab68 --- /dev/null +++ b/webapp/src/utils/i18nUtils.ts @@ -0,0 +1,22 @@ +import i18n from "../i18n"; + +/** + * Gets the current language used in the application. + * + * @returns The current language. + */ +export function getCurrentLanguage() { + return i18n.language; +} + +/** + * Translates the given key and appends a colon (:) at the end + * with the appropriate spacing for the current language. + * + * @param key - The translation key. + * @returns The translated string with a colon (:) appended. + */ +export function translateWithColon(key: string): string { + const lang = i18n.language; + return `${i18n.t(key)}${lang.startsWith("fr") ? " " : ""}:`; +} From ec49773bd701a470815c25b3fb09f0f5dd679160 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Thu, 21 Mar 2024 15:13:55 +0100 Subject: [PATCH 034/147] feat(hooks-ui): create useUpdateEffectOnce --- webapp/src/hooks/useUpdateEffectOnce.ts | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 webapp/src/hooks/useUpdateEffectOnce.ts diff --git a/webapp/src/hooks/useUpdateEffectOnce.ts b/webapp/src/hooks/useUpdateEffectOnce.ts new file mode 100644 index 0000000000..61fedd115e --- /dev/null +++ b/webapp/src/hooks/useUpdateEffectOnce.ts @@ -0,0 +1,23 @@ +import { useEffect, useRef } from "react"; +import { useUpdateEffect } from "react-use"; + +/** + * Hook that runs the effect only at the first dependencies update. + * It behaves like the `useEffect` hook, but it skips the initial run, + * and the runs following the first update. + * + * @param effect - The effect function to run. + * @param deps - An array of dependencies to watch for changes. 
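+ *
+ * @example
+ * // Usage sketch (assuming a `value` state in the calling component): the
+ * // callback runs only when `value` changes for the first time; it does not
+ * // run on mount and does not run on later changes.
+ * useUpdateEffectOnce(() => {
+ *   console.log("`value` changed for the first time");
+ * }, [value]);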
+ */ +const useUpdateEffectOnce: typeof useEffect = (effect, deps) => { + const hasUpdated = useRef(false); + + useUpdateEffect(() => { + if (!hasUpdated.current) { + hasUpdated.current = true; + return effect(); + } + }, deps); +}; + +export default useUpdateEffectOnce; From a1a3fae06474d84c4a1b98af61aff2babb89fe97 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Wed, 17 Apr 2024 15:55:16 +0200 Subject: [PATCH 035/147] feat(common-ui): update GroupedDataTable * refactor with `useMaterialReactTable` * add async loading * set `group` and `name` columns frozen * update styles * hide duplicate button `onCreate` not defined --- webapp/public/locales/en/main.json | 1 + webapp/public/locales/fr/main.json | 1 + .../App/Singlestudy/explore/TabWrapper.tsx | 1 - .../GroupedDataTable/DuplicateDialog.tsx | 4 +- .../common/GroupedDataTable/index.tsx | 292 +++++++++++------- .../common/GroupedDataTable/utils.ts | 9 + 6 files changed, 185 insertions(+), 123 deletions(-) diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 0205b8a0fd..8216534daa 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -70,6 +70,7 @@ "global.assign": "Assign", "global.undo": "Undo", "global.redo": "Redo", + "global.total": "Total", "global.time.hourly": "Hourly", "global.time.daily": "Daily", "global.time.weekly": "Weekly", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index 86768663c2..4188d598ca 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -70,6 +70,7 @@ "global.assign": "Assigner", "global.undo": "Annuler", "global.redo": "Rétablir", + "global.total": "Total", "global.time.hourly": "Horaire", "global.time.daily": "Journalier", "global.time.weekly": "Hebdomadaire", diff --git a/webapp/src/components/App/Singlestudy/explore/TabWrapper.tsx b/webapp/src/components/App/Singlestudy/explore/TabWrapper.tsx index ec7e9149c3..256d5cfa3a 100644 --- a/webapp/src/components/App/Singlestudy/explore/TabWrapper.tsx +++ b/webapp/src/components/App/Singlestudy/explore/TabWrapper.tsx @@ -84,7 +84,6 @@ function TabWrapper({ display: "flex", flexDirection: "column", justifyContent: "flex-start", - alignItems: "center", }, sx, )} diff --git a/webapp/src/components/common/GroupedDataTable/DuplicateDialog.tsx b/webapp/src/components/common/GroupedDataTable/DuplicateDialog.tsx index 34664b8f3a..099748ef9a 100644 --- a/webapp/src/components/common/GroupedDataTable/DuplicateDialog.tsx +++ b/webapp/src/components/common/GroupedDataTable/DuplicateDialog.tsx @@ -1,5 +1,5 @@ import { useTranslation } from "react-i18next"; -import ControlPointDuplicateIcon from "@mui/icons-material/ControlPointDuplicate"; +import ContentCopyIcon from "@mui/icons-material/ContentCopy"; import Fieldset from "../Fieldset"; import FormDialog from "../dialogs/FormDialog"; import { SubmitHandlerPlus } from "../Form/types"; @@ -38,7 +38,7 @@ function DuplicateDialog(props: Props) { { data: TData[]; - columns: Array>; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + columns: Array>; groups: string[] | readonly string[]; onCreate?: (values: TData) => Promise; onDelete?: (ids: string[]) => void; + onNameClick?: (row: MRT_Row) => void; + isLoading?: boolean; } +// Use ids to identify default columns (instead of `accessorKey`), +// to have a unique identifier. It is more likely to have a duplicate +// `accessorKey` with `columns` prop. 
+const GROUP_COLUMN_ID = "_group"; +const NAME_COLUMN_ID = "_name"; + function GroupedDataTable({ data, columns, groups, onCreate, onDelete, + onNameClick, + isLoading, }: GroupedDataTableProps) { const { t } = useTranslation(); const [openDialog, setOpenDialog] = useState< @@ -39,31 +57,164 @@ function GroupedDataTable({ >(""); const [tableData, setTableData] = useState(data); const [rowSelection, setRowSelection] = useState({}); + // Allow to use the last version of `onNameClick` in `tableColumns` + const callbacksRef = useAutoUpdateRef({ onNameClick }); + const prevData = usePrevious(data); - const isAnyRowSelected = useMemo( - () => Object.values(rowSelection).some((value) => value), - [rowSelection], - ); - - const isOneRowSelected = useMemo( - () => Object.values(rowSelection).filter((value) => value).length === 1, - [rowSelection], - ); - - const selectedRow = useMemo(() => { - if (isOneRowSelected) { - const selectedIndex = Object.keys(rowSelection).find( - (key) => rowSelection[key], - ); - return selectedIndex && tableData[+selectedIndex]; + // Update once `data` only if previous value was empty. + // It allows to handle loading data. + useUpdateEffectOnce(() => { + if (prevData && prevData.length === 0) { + setTableData(data); } - }, [isOneRowSelected, rowSelection, tableData]); + }, [data.length]); const existingNames = useMemo( () => tableData.map((row) => row.name.toLowerCase()), [tableData], ); + const tableColumns = useMemo>>( + () => [ + { + accessorKey: "group", + header: t("global.group"), + id: GROUP_COLUMN_ID, + size: 50, + filterVariant: "autocomplete", + filterSelectOptions: groups, + footer: translateWithColon("global.total"), + ...getTableOptionsForAlign("left"), + }, + { + accessorKey: "name", + header: t("global.name"), + id: NAME_COLUMN_ID, + size: 100, + filterVariant: "autocomplete", + filterSelectOptions: existingNames, + Cell: + callbacksRef.current.onNameClick && + (({ renderedCellValue, row }) => ( + callbacksRef.current.onNameClick?.(row)} + > + {renderedCellValue} + + )), + ...getTableOptionsForAlign("left"), + }, + ...columns, + ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [columns, t, ...groups], + ); + + const table = useMaterialReactTable({ + data: tableData, + columns: tableColumns, + initialState: { + grouping: [GROUP_COLUMN_ID], + density: "compact", + expanded: true, + columnPinning: { left: [GROUP_COLUMN_ID] }, + }, + state: { isLoading, rowSelection }, + enableGrouping: true, + enableStickyFooter: true, + enableStickyHeader: true, + enableColumnDragging: false, + enableColumnActions: false, + enableBottomToolbar: false, + enablePagination: false, + positionToolbarAlertBanner: "none", + // Rows + muiTableBodyRowProps: ({ row }) => ({ + onClick: () => { + const isGrouped = row.getIsGrouped(); + const rowIds = isGrouped + ? row.getLeafRows().map((r) => r.id) + : [row.id]; + + setRowSelection((prev) => { + const newValue = isGrouped + ? 
!rowIds.some((id) => prev[id]) // Select/Deselect all + : !prev[row.id]; + + return { + ...prev, + ...rowIds.reduce((acc, id) => ({ ...acc, [id]: newValue }), {}), + }; + }); + }, + selected: rowSelection[row.id], + sx: { cursor: "pointer" }, + }), + // Toolbars + renderTopToolbarCustomActions: ({ table }) => ( + + {onCreate && ( + + )} + {onCreate && ( + + )} + {onDelete && ( + + )} + + ), + renderToolbarInternalActions: ({ table }) => ( + <> + + + + ), + onRowSelectionChange: setRowSelection, + // Styles + ...R.mergeDeepRight(getTableOptionsForAlign("right"), { + muiTableBodyCellProps: { + sx: { borderBottom: "1px solid rgba(224, 224, 224, 0.3)" }, + }, + }), + }); + + const selectedRows = table + .getSelectedRowModel() + .rows.map((row) => row.original); + const selectedRow = selectedRows.length === 1 ? selectedRows[0] : null; + //////////////////////////////////////////////////////////////// // Utils //////////////////////////////////////////////////////////////// @@ -127,106 +278,7 @@ function GroupedDataTable({ return ( <> - { - const handleRowClick = () => { - // prevent group rows to be selected - if (groupingColumnId === undefined) { - setRowSelection((prev) => ({ - ...prev, - [id]: !prev[id], - })); - } - }; - - return { - onClick: handleRowClick, - selected: rowSelection[id], - sx: { - cursor: "pointer", - }, - }; - }} - state={{ rowSelection }} - enableColumnDragging={false} - enableColumnActions={false} - positionToolbarAlertBanner="none" - enableBottomToolbar={false} - enableStickyFooter - enableStickyHeader - enablePagination={false} - renderTopToolbarCustomActions={() => ( - - {onCreate && ( - - )} - - {onDelete && ( - - )} - - )} - renderToolbarInternalActions={({ table }) => ( - <> - - - - )} - muiTableHeadCellProps={{ - align: "right", - }} - muiTableBodyCellProps={{ - align: "right", - sx: { - borderBottom: "1px solid rgba(224, 224, 224, 0.3)", - }, - }} - muiTableFooterCellProps={{ - align: "right", - }} - muiTablePaperProps={{ - sx: { - width: 1, - display: "flex", - flexDirection: "column", - overflow: "auto", - }, - }} - /> + {openDialog === "add" && ( row[property]); return generateNextValue(baseValue, existingValues); }; + +export function getTableOptionsForAlign(align: TableCellProps["align"]) { + return { + muiTableHeadCellProps: { align }, + muiTableBodyCellProps: { align }, + muiTableFooterCellProps: { align }, + }; +} From ed863e91be0c4814f97a9ae61f7d89231bb3ed66 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Mon, 25 Mar 2024 13:34:41 +0100 Subject: [PATCH 036/147] refactor(clusters-ui): update columns definitions * use `createMRTColumnHelper` for type safety * remove duplicate columns `group` and `name` --- .../Modelization/Areas/Renewables/index.tsx | 105 +++++----------- .../Modelization/Areas/Storages/index.tsx | 114 +++++------------- .../Modelization/Areas/Thermal/index.tsx | 114 +++++------------- 3 files changed, 93 insertions(+), 240 deletions(-) diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx index c66c2b388a..316a15ca0f 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx @@ -1,5 +1,5 @@ import { useMemo } from "react"; -import { MRT_ColumnDef } from "material-react-table"; +import { createMRTColumnHelper, type MRT_Row } from 
"material-react-table"; import { Box, Chip } from "@mui/material"; import { useLocation, useNavigate, useOutletContext } from "react-router-dom"; import { useTranslation } from "react-i18next"; @@ -26,9 +26,10 @@ import UsePromiseCond from "../../../../../../common/utils/UsePromiseCond"; function Renewables() { const { study } = useOutletContext<{ study: StudyMetadata }>(); const [t] = useTranslation(); - const areaId = useAppSelector(getCurrentAreaId); const navigate = useNavigate(); const location = useLocation(); + const areaId = useAppSelector(getCurrentAreaId); + const columnHelper = createMRTColumnHelper(); const { clusters, @@ -42,97 +43,48 @@ function Renewables() { [study.id, areaId], ); - const columns = useMemo>>( + const columns = useMemo( () => [ - { - accessorKey: "name", - header: "Name", - muiTableHeadCellProps: { - align: "left", - }, - muiTableBodyCellProps: { - align: "left", - }, - size: 100, - Cell: ({ renderedCellValue, row }) => { - const clusterId = row.original.id; - return ( - navigate(`${location.pathname}/${clusterId}`)} - > - {renderedCellValue} - - ); - }, - }, - { - accessorKey: "group", - header: "Group", - size: 50, - filterVariant: "select", - filterSelectOptions: [...RENEWABLE_GROUPS], - muiTableHeadCellProps: { - align: "left", - }, - muiTableBodyCellProps: { - align: "left", - }, - Footer: () => ( - Total: - ), - }, - { - accessorKey: "enabled", + columnHelper.accessor("enabled", { header: "Enabled", size: 50, filterVariant: "checkbox", Cell: ({ cell }) => ( () ? t("button.yes") : t("button.no")} - color={cell.getValue() ? "success" : "error"} + label={cell.getValue() ? t("button.yes") : t("button.no")} + color={cell.getValue() ? "success" : "error"} size="small" sx={{ minWidth: 40 }} /> ), - }, - { - accessorKey: "tsInterpretation", + }), + columnHelper.accessor("tsInterpretation", { header: "TS Interpretation", size: 50, - }, - { - accessorKey: "unitCount", + }), + columnHelper.accessor("unitCount", { header: "Unit Count", size: 50, aggregationFn: "sum", AggregatedCell: ({ cell }) => ( - {cell.getValue()} + {cell.getValue()} ), Footer: () => {totalUnitCount}, - }, - { - accessorKey: "nominalCapacity", + }), + columnHelper.accessor("nominalCapacity", { header: "Nominal Capacity (MW)", - size: 200, - Cell: ({ cell }) => Math.floor(cell.getValue()), - }, - { - accessorKey: "installedCapacity", + size: 220, + Cell: ({ cell }) => Math.floor(cell.getValue()), + }), + columnHelper.accessor("installedCapacity", { header: "Enabled / Installed (MW)", - size: 200, + size: 220, aggregationFn: capacityAggregationFn(), AggregatedCell: ({ cell }) => ( - {cell.getValue() ?? ""} + {cell.getValue() ?? 
""} ), Cell: ({ row }) => ( @@ -146,16 +98,10 @@ function Renewables() { {totalEnabledCapacity} / {totalInstalledCapacity} ), - }, - ], - [ - location.pathname, - navigate, - t, - totalEnabledCapacity, - totalInstalledCapacity, - totalUnitCount, + }), ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [t, totalEnabledCapacity, totalInstalledCapacity, totalUnitCount], ); //////////////////////////////////////////////////////////////// @@ -175,6 +121,10 @@ function Renewables() { return deleteRenewableClusters(study.id, areaId, ids); }; + const handleNameClick = (row: MRT_Row) => { + navigate(`${location.pathname}/${row.original.id}`); + }; + //////////////////////////////////////////////////////////////// // JSX //////////////////////////////////////////////////////////////// @@ -190,6 +140,7 @@ function Renewables() { groups={RENEWABLE_GROUPS} onCreate={handleCreateRow} onDelete={handleDeleteSelection} + onNameClick={handleNameClick} /> )} ifRejected={(error) => } diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx index 06e6db36db..e3f82110f0 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx @@ -1,6 +1,6 @@ import { useMemo } from "react"; import { useTranslation } from "react-i18next"; -import { MRT_ColumnDef } from "material-react-table"; +import { createMRTColumnHelper, type MRT_Row } from "material-react-table"; import { Box, Chip, Tooltip } from "@mui/material"; import { useLocation, useNavigate, useOutletContext } from "react-router-dom"; import { StudyMetadata } from "../../../../../../../common/types"; @@ -25,6 +25,7 @@ function Storages() { const navigate = useNavigate(); const location = useLocation(); const areaId = useAppSelector(getCurrentAreaId); + const columnHelper = createMRTColumnHelper(); const storages = usePromiseWithSnackbarError( () => getStorages(study.id, areaId), @@ -56,54 +57,9 @@ function Storages() { ); }, [storages]); - const columns = useMemo>>( + const columns = useMemo( () => [ - { - accessorKey: "name", - header: t("global.name"), - muiTableHeadCellProps: { - align: "left", - }, - muiTableBodyCellProps: { - align: "left", - }, - size: 100, - Cell: ({ renderedCellValue, row }) => { - const storageId = row.original.id; - return ( - navigate(`${location.pathname}/${storageId}`)} - > - {renderedCellValue} - - ); - }, - }, - { - accessorKey: "group", - header: t("global.group"), - size: 50, - filterVariant: "select", - filterSelectOptions: [...STORAGE_GROUPS], - muiTableHeadCellProps: { - align: "left", - }, - muiTableBodyCellProps: { - align: "left", - }, - Footer: () => ( - Total: - ), - }, - { - accessorKey: "injectionNominalCapacity", + columnHelper.accessor("injectionNominalCapacity", { header: t("study.modelization.storages.injectionNominalCapacity"), Header: ({ column }) => ( ), size: 100, - Cell: ({ cell }) => Math.floor(cell.getValue()), + aggregationFn: "sum", AggregatedCell: ({ cell }) => ( - {Math.floor(cell.getValue())} + {Math.floor(cell.getValue())} ), + Cell: ({ cell }) => Math.floor(cell.getValue()), Footer: () => ( {Math.floor(totalInjectionNominalCapacity)} ), - }, - { - accessorKey: "withdrawalNominalCapacity", + }), + columnHelper.accessor("withdrawalNominalCapacity", { header: t("study.modelization.storages.withdrawalNominalCapacity"), Header: ({ column }) => ( ( 
- {Math.floor(cell.getValue())} + {Math.floor(cell.getValue())} ), - Cell: ({ cell }) => Math.floor(cell.getValue()), + Cell: ({ cell }) => Math.floor(cell.getValue()), Footer: () => ( {Math.floor(totalWithdrawalNominalCapacity)} ), - }, - { - accessorKey: "reservoirCapacity", + }), + columnHelper.accessor("reservoirCapacity", { header: t("study.modelization.storages.reservoirCapacity"), Header: ({ column }) => ( ), size: 100, - Cell: ({ cell }) => `${cell.getValue()}`, - }, - { - accessorKey: "efficiency", + Cell: ({ cell }) => `${cell.getValue()}`, + }), + columnHelper.accessor("efficiency", { header: t("study.modelization.storages.efficiency"), size: 50, - Cell: ({ cell }) => `${Math.floor(cell.getValue() * 100)}`, - }, - { - accessorKey: "initialLevel", + Cell: ({ cell }) => `${Math.floor(cell.getValue() * 100)}`, + }), + columnHelper.accessor("initialLevel", { header: t("study.modelization.storages.initialLevel"), size: 50, - Cell: ({ cell }) => `${Math.floor(cell.getValue() * 100)}`, - }, - { - accessorKey: "initialLevelOptim", + Cell: ({ cell }) => `${Math.floor(cell.getValue() * 100)}`, + }), + columnHelper.accessor("initialLevelOptim", { header: t("study.modelization.storages.initialLevelOptim"), - size: 180, + size: 200, filterVariant: "checkbox", Cell: ({ cell }) => ( () ? t("button.yes") : t("button.no")} - color={cell.getValue() ? "success" : "error"} + label={cell.getValue() ? t("button.yes") : t("button.no")} + color={cell.getValue() ? "success" : "error"} size="small" sx={{ minWidth: 40 }} /> ), - }, - ], - [ - location.pathname, - navigate, - t, - totalInjectionNominalCapacity, - totalWithdrawalNominalCapacity, + }), ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [t, totalInjectionNominalCapacity, totalWithdrawalNominalCapacity], ); //////////////////////////////////////////////////////////////// @@ -220,6 +167,10 @@ function Storages() { return deleteStorages(study.id, areaId, ids); }; + const handleNameClick = (row: MRT_Row) => { + navigate(`${location.pathname}/${row.original.id}`); + }; + //////////////////////////////////////////////////////////////// // JSX //////////////////////////////////////////////////////////////// @@ -235,6 +186,7 @@ function Storages() { groups={STORAGE_GROUPS} onCreate={handleCreateRow} onDelete={handleDeleteSelection} + onNameClick={handleNameClick} /> )} ifRejected={(error) => } diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx index e403b52e5f..467d908cc4 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx @@ -1,5 +1,5 @@ import { useMemo } from "react"; -import { MRT_ColumnDef } from "material-react-table"; +import { createMRTColumnHelper, type MRT_Row } from "material-react-table"; import { Box, Chip } from "@mui/material"; import { useLocation, useNavigate, useOutletContext } from "react-router-dom"; import { useTranslation } from "react-i18next"; @@ -29,6 +29,7 @@ function Thermal() { const navigate = useNavigate(); const location = useLocation(); const areaId = useAppSelector(getCurrentAreaId); + const columnHelper = createMRTColumnHelper(); const { clusters, @@ -42,106 +43,57 @@ function Thermal() { [study.id, areaId], ); - const columns = useMemo>>( + const columns = useMemo( () => [ - { - accessorKey: "name", - header: "Name", - size: 100, - 
muiTableHeadCellProps: { - align: "left", - }, - muiTableBodyCellProps: { - align: "left", - }, - Cell: ({ renderedCellValue, row }) => { - const clusterId = row.original.id; - return ( - navigate(`${location.pathname}/${clusterId}`)} - > - {renderedCellValue} - - ); - }, - }, - { - accessorKey: "group", - header: "Group", - size: 50, - filterVariant: "select", - filterSelectOptions: [...THERMAL_GROUPS], - muiTableHeadCellProps: { - align: "left", - }, - muiTableBodyCellProps: { - align: "left", - }, - Footer: () => ( - Total: - ), - }, - { - accessorKey: "enabled", + columnHelper.accessor("enabled", { header: "Enabled", size: 50, filterVariant: "checkbox", Cell: ({ cell }) => ( () ? t("button.yes") : t("button.no")} - color={cell.getValue() ? "success" : "error"} + label={cell.getValue() ? t("button.yes") : t("button.no")} + color={cell.getValue() ? "success" : "error"} size="small" sx={{ minWidth: 40 }} /> ), - }, - { - accessorKey: "mustRun", + }), + columnHelper.accessor("mustRun", { header: "Must Run", size: 50, filterVariant: "checkbox", Cell: ({ cell }) => ( () ? t("button.yes") : t("button.no")} - color={cell.getValue() ? "success" : "error"} + label={cell.getValue() ? t("button.yes") : t("button.no")} + color={cell.getValue() ? "success" : "error"} size="small" sx={{ minWidth: 40 }} /> ), - }, - { - accessorKey: "unitCount", + }), + columnHelper.accessor("unitCount", { header: "Unit Count", size: 50, aggregationFn: "sum", AggregatedCell: ({ cell }) => ( - {cell.getValue()} + {cell.getValue()} ), Footer: () => {totalUnitCount}, - }, - { - accessorKey: "nominalCapacity", + }), + columnHelper.accessor("nominalCapacity", { header: "Nominal Capacity (MW)", - size: 200, - Cell: ({ cell }) => cell.getValue().toFixed(1), - }, - { - accessorKey: "installedCapacity", + size: 220, + Cell: ({ cell }) => cell.getValue().toFixed(1), + }), + columnHelper.accessor("installedCapacity", { header: "Enabled / Installed (MW)", - size: 200, + size: 220, aggregationFn: capacityAggregationFn(), AggregatedCell: ({ cell }) => ( - {cell.getValue() ?? ""} + {cell.getValue() ?? 
""} ), Cell: ({ row }) => ( @@ -155,22 +107,15 @@ function Thermal() { {totalEnabledCapacity} / {totalInstalledCapacity} ), - }, - { - accessorKey: "marketBidCost", + }), + columnHelper.accessor("marketBidCost", { header: "Market Bid (€/MWh)", size: 50, - Cell: ({ cell }) => <>{cell.getValue().toFixed(2)}, - }, - ], - [ - location.pathname, - navigate, - t, - totalEnabledCapacity, - totalInstalledCapacity, - totalUnitCount, + Cell: ({ cell }) => <>{cell.getValue().toFixed(2)}, + }), ], + // eslint-disable-next-line react-hooks/exhaustive-deps + [t, totalEnabledCapacity, totalInstalledCapacity, totalUnitCount], ); //////////////////////////////////////////////////////////////// @@ -190,6 +135,10 @@ function Thermal() { return deleteThermalClusters(study.id, areaId, ids); }; + const handleNameClick = (row: MRT_Row) => { + navigate(`${location.pathname}/${row.original.id}`); + }; + //////////////////////////////////////////////////////////////// // JSX //////////////////////////////////////////////////////////////// @@ -205,6 +154,7 @@ function Thermal() { groups={THERMAL_GROUPS} onCreate={handleCreateRow} onDelete={handleDeleteSelection} + onNameClick={handleNameClick} /> )} ifRejected={(error) => } From 4ac367d8d3d6a2760028b55ade39a13e2ff37534 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Thu, 21 Mar 2024 18:47:54 +0100 Subject: [PATCH 037/147] feat(clusters-ui): use async loading from GroupedDataTable --- .../Modelization/Areas/Renewables/index.tsx | 27 ++++++---------- .../Modelization/Areas/Storages/index.tsx | 31 +++++++------------ .../Modelization/Areas/Thermal/index.tsx | 27 ++++++---------- .../Modelization/Areas/common/utils.ts | 20 +++++------- 4 files changed, 37 insertions(+), 68 deletions(-) diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx index 316a15ca0f..e133d40cec 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx @@ -19,9 +19,6 @@ import { capacityAggregationFn, useClusterDataWithCapacity, } from "../common/utils"; -import SimpleLoader from "../../../../../../common/loaders/SimpleLoader"; -import SimpleContent from "../../../../../../common/page/SimpleContent"; -import UsePromiseCond from "../../../../../../common/utils/UsePromiseCond"; function Renewables() { const { study } = useOutletContext<{ study: StudyMetadata }>(); @@ -32,11 +29,11 @@ function Renewables() { const columnHelper = createMRTColumnHelper(); const { - clusters, clustersWithCapacity, totalUnitCount, totalInstalledCapacity, totalEnabledCapacity, + isLoading, } = useClusterDataWithCapacity( () => getRenewableClusters(study.id, areaId), t("studies.error.retrieveData"), @@ -130,20 +127,14 @@ function Renewables() { //////////////////////////////////////////////////////////////// return ( - } - ifResolved={() => ( - - )} - ifRejected={(error) => } + ); } diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx index e3f82110f0..226aaab7b9 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx @@ -7,7 +7,6 @@ import { StudyMetadata } from 
"../../../../../../../common/types"; import useAppSelector from "../../../../../../../redux/hooks/useAppSelector"; import { getCurrentAreaId } from "../../../../../../../redux/selectors"; import GroupedDataTable from "../../../../../../common/GroupedDataTable"; -import SimpleLoader from "../../../../../../common/loaders/SimpleLoader"; import { Storage, getStorages, @@ -15,8 +14,6 @@ import { createStorage, STORAGE_GROUPS, } from "./utils"; -import SimpleContent from "../../../../../../common/page/SimpleContent"; -import UsePromiseCond from "../../../../../../common/utils/UsePromiseCond"; import usePromiseWithSnackbarError from "../../../../../../../hooks/usePromiseWithSnackbarError"; function Storages() { @@ -27,7 +24,7 @@ function Storages() { const areaId = useAppSelector(getCurrentAreaId); const columnHelper = createMRTColumnHelper(); - const storages = usePromiseWithSnackbarError( + const { data: storages, isLoading } = usePromiseWithSnackbarError( () => getStorages(study.id, areaId), { errorMessage: t("studies.error.retrieveData"), @@ -37,14 +34,14 @@ function Storages() { const { totalWithdrawalNominalCapacity, totalInjectionNominalCapacity } = useMemo(() => { - if (!storages.data) { + if (!storages) { return { totalWithdrawalNominalCapacity: 0, totalInjectionNominalCapacity: 0, }; } - return storages.data.reduce( + return storages.reduce( (acc, { withdrawalNominalCapacity, injectionNominalCapacity }) => { acc.totalWithdrawalNominalCapacity += withdrawalNominalCapacity; acc.totalInjectionNominalCapacity += injectionNominalCapacity; @@ -176,20 +173,14 @@ function Storages() { //////////////////////////////////////////////////////////////// return ( - } - ifResolved={(data) => ( - - )} - ifRejected={(error) => } + ); } diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx index 467d908cc4..671319c617 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx @@ -15,13 +15,10 @@ import { import useAppSelector from "../../../../../../../redux/hooks/useAppSelector"; import { getCurrentAreaId } from "../../../../../../../redux/selectors"; import GroupedDataTable from "../../../../../../common/GroupedDataTable"; -import SimpleLoader from "../../../../../../common/loaders/SimpleLoader"; -import SimpleContent from "../../../../../../common/page/SimpleContent"; import { capacityAggregationFn, useClusterDataWithCapacity, } from "../common/utils"; -import UsePromiseCond from "../../../../../../common/utils/UsePromiseCond"; function Thermal() { const { study } = useOutletContext<{ study: StudyMetadata }>(); @@ -32,11 +29,11 @@ function Thermal() { const columnHelper = createMRTColumnHelper(); const { - clusters, clustersWithCapacity, totalUnitCount, totalInstalledCapacity, totalEnabledCapacity, + isLoading, } = useClusterDataWithCapacity( () => getThermalClusters(study.id, areaId), t("studies.error.retrieveData"), @@ -144,20 +141,14 @@ function Thermal() { //////////////////////////////////////////////////////////////// return ( - } - ifResolved={() => ( - - )} - ifRejected={(error) => } + ); } diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/utils.ts index 9231804f53..1128a0fc60 100644 --- 
a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/utils.ts +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/utils.ts @@ -6,7 +6,6 @@ import { editStudy } from "../../../../../../../services/api/study"; import { ThermalClusterWithCapacity } from "../Thermal/utils"; import { RenewableClusterWithCapacity } from "../Renewables/utils"; import usePromiseWithSnackbarError from "../../../../../../../hooks/usePromiseWithSnackbarError"; -import { UsePromiseResponse } from "../../../../../../../hooks/usePromise"; export const saveField = R.curry( ( @@ -64,11 +63,11 @@ type ClusterWithCapacity = T & { }; interface UseClusterDataWithCapacityReturn { - clusters: UsePromiseResponse; clustersWithCapacity: Array>; totalUnitCount: number; totalInstalledCapacity: number; totalEnabledCapacity: number; + isLoading: boolean; } export const useClusterDataWithCapacity = ( @@ -76,23 +75,20 @@ export const useClusterDataWithCapacity = ( errorMessage: string, deps: DependencyList, ): UseClusterDataWithCapacityReturn => { - const clusters: UsePromiseResponse = usePromiseWithSnackbarError( - fetchFn, - { - errorMessage, - deps, - }, - ); + const { data: clusters, isLoading } = usePromiseWithSnackbarError(fetchFn, { + errorMessage, + deps, + }); const clustersWithCapacity: Array> = useMemo( () => - clusters.data?.map((cluster) => { + clusters?.map((cluster) => { const { unitCount, nominalCapacity, enabled } = cluster; const installedCapacity = unitCount * nominalCapacity; const enabledCapacity = enabled ? installedCapacity : 0; return { ...cluster, installedCapacity, enabledCapacity }; }) || [], - [clusters.data], + [clusters], ); const { totalUnitCount, totalInstalledCapacity, totalEnabledCapacity } = @@ -113,10 +109,10 @@ export const useClusterDataWithCapacity = ( }, [clustersWithCapacity]); return { - clusters, clustersWithCapacity, totalUnitCount: Math.floor(totalUnitCount), totalInstalledCapacity: Math.floor(totalInstalledCapacity), totalEnabledCapacity: Math.floor(totalEnabledCapacity), + isLoading, }; }; From 35cb2cf03f398d9ad273a91ab914135af6912e62 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Mon, 25 Mar 2024 10:10:43 +0100 Subject: [PATCH 038/147] feat(common-ui): add `deleteConfirmationMessage` prop in GroupedDataTable --- webapp/public/locales/en/main.json | 4 +++- webapp/public/locales/fr/main.json | 4 +++- .../explore/Modelization/Areas/Renewables/index.tsx | 3 +++ .../explore/Modelization/Areas/Storages/index.tsx | 3 +++ .../explore/Modelization/Areas/Thermal/index.tsx | 3 +++ webapp/src/components/common/GroupedDataTable/index.tsx | 7 ++++++- 6 files changed, 21 insertions(+), 3 deletions(-) diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 8216534daa..4902a37596 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -95,6 +95,7 @@ "data.title": "Data", "dialog.title.confirmation": "Confirmation", "dialog.message.logout": "Are you sure you want to logout?", + "dialog.message.confirmDelete": "Do you confirm the deletion?", "button.collapse": "Collapse", "button.expand": "Expand", "button.yes": "Yes", @@ -488,7 +489,8 @@ "study.modelization.clusters.backClusterList": "Back to cluster list", "study.modelization.clusters.tsInterpretation": "TS interpretation", "study.modelization.clusters.group": "Group", - "studies.modelization.clusters.question.delete": "Are you sure you want to delete this cluster?", + 
"studies.modelization.clusters.question.delete_one": "Are you sure you want to delete this cluster?", + "studies.modelization.clusters.question.delete_other": "Are you sure you want to delete these {{count}} clusters?", "study.modelization.bindingConst.comments": "Comments", "study.modelization.bindingConst.type": "Type", "study.modelization.bindingConst.constraints": "Constraints", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index 4188d598ca..5b0f6481c7 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -95,6 +95,7 @@ "data.title": "Données", "dialog.title.confirmation": "Confirmation", "dialog.message.logout": "Êtes vous sûr de vouloir vous déconnecter ?", + "dialog.message.confirmDelete": "Confirmez-vous la suppression ?", "button.collapse": "Réduire", "button.expand": "Étendre", "button.yes": "Oui", @@ -488,7 +489,8 @@ "study.modelization.clusters.backClusterList": "Retour à la liste des clusters", "study.modelization.clusters.tsInterpretation": "TS interpretation", "study.modelization.clusters.group": "Groupes", - "studies.modelization.clusters.question.delete": "Êtes-vous sûr de vouloir supprimer ce cluster ?", + "studies.modelization.clusters.question.delete_one": "Êtes-vous sûr de vouloir supprimer ce cluster ?", + "studies.modelization.clusters.question.delete_other": "Êtes-vous sûr de vouloir supprimer ces {{count}} clusters ?", "study.modelization.bindingConst.comments": "Commentaires", "study.modelization.bindingConst.type": "Type", "study.modelization.bindingConst.constraints": "Contraintes", diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx index e133d40cec..cd1a467e54 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx @@ -135,6 +135,9 @@ function Renewables() { onCreate={handleCreateRow} onDelete={handleDeleteSelection} onNameClick={handleNameClick} + deleteConfirmationMessage={(count) => + t("studies.modelization.clusters.question.delete", { count }) + } /> ); } diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx index 226aaab7b9..f6f417f3c8 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx @@ -181,6 +181,9 @@ function Storages() { onCreate={handleCreateRow} onDelete={handleDeleteSelection} onNameClick={handleNameClick} + deleteConfirmationMessage={(count) => + t("studies.modelization.clusters.question.delete", { count }) + } /> ); } diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx index 671319c617..46bd508ae3 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx @@ -149,6 +149,9 @@ function Thermal() { onCreate={handleCreateRow} onDelete={handleDeleteSelection} onNameClick={handleNameClick} + deleteConfirmationMessage={(count) => + t("studies.modelization.clusters.question.delete", { count }) + } /> 
); } diff --git a/webapp/src/components/common/GroupedDataTable/index.tsx b/webapp/src/components/common/GroupedDataTable/index.tsx index 94989292dd..936ab4dfe6 100644 --- a/webapp/src/components/common/GroupedDataTable/index.tsx +++ b/webapp/src/components/common/GroupedDataTable/index.tsx @@ -22,6 +22,7 @@ import DuplicateDialog from "./DuplicateDialog"; import { translateWithColon } from "../../../utils/i18nUtils"; import useAutoUpdateRef from "../../../hooks/useAutoUpdateRef"; import * as R from "ramda"; +import * as RA from "ramda-adjunct"; import { usePrevious } from "react-use"; import useUpdateEffectOnce from "../../../hooks/useUpdateEffectOnce"; @@ -34,6 +35,7 @@ export interface GroupedDataTableProps { onDelete?: (ids: string[]) => void; onNameClick?: (row: MRT_Row) => void; isLoading?: boolean; + deleteConfirmationMessage?: string | ((count: number) => string); } // Use ids to identify default columns (instead of `accessorKey`), @@ -50,6 +52,7 @@ function GroupedDataTable({ onDelete, onNameClick, isLoading, + deleteConfirmationMessage, }: GroupedDataTableProps) { const { t } = useTranslation(); const [openDialog, setOpenDialog] = useState< @@ -306,7 +309,9 @@ function GroupedDataTable({ onConfirm={handleDelete} alert="warning" > - {t("studies.modelization.clusters.question.delete")} + {RA.isFunction(deleteConfirmationMessage) + ? deleteConfirmationMessage(selectedRows.length) + : deleteConfirmationMessage ?? t("dialog.message.confirmDelete")} )} From 35c8fe1b6f0eab2adff435cb6cec0f1984111fa2 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Fri, 22 Mar 2024 11:44:41 +0100 Subject: [PATCH 039/147] feat(utils-ui): add `PromiseAny` and `PartialExceptFor` types in tsUtils --- webapp/src/utils/tsUtils.ts | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/webapp/src/utils/tsUtils.ts b/webapp/src/utils/tsUtils.ts index eb60713aa8..7acf6465a2 100644 --- a/webapp/src/utils/tsUtils.ts +++ b/webapp/src/utils/tsUtils.ts @@ -1,3 +1,16 @@ +import { O } from "ts-toolbelt"; + +/** + * Allow to use `any` with `Promise` type without disabling ESLint rule. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type PromiseAny = Promise; + +/** + * Make all properties in T optional, except for those specified by K. + */ +export type PartialExceptFor = O.Required, K>; + export function tuple(...items: T): T { return items; } From 02a651692afd46d40a73ce63d1b660a37e04013e Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Fri, 22 Mar 2024 09:44:06 +0100 Subject: [PATCH 040/147] feat(utils-ui): add `toError` in fnUtils --- webapp/src/utils/fnUtils.ts | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/webapp/src/utils/fnUtils.ts b/webapp/src/utils/fnUtils.ts index 226e58c836..dd32ac4c97 100644 --- a/webapp/src/utils/fnUtils.ts +++ b/webapp/src/utils/fnUtils.ts @@ -11,3 +11,22 @@ export function voidFn(...args: TArgs) { // Intentionally empty, as its purpose is to do nothing. } + +/** + * A utility function that converts an unknown value to an Error object. + * If the value is already an Error object, it is returned as is. + * If the value is a string, it is used as the message for the new Error object. + * If the value is anything else, a new Error object with a generic message is created. + * + * @param error - The value to convert to an Error object. + * @returns An Error object. 
+ */ +export function toError(error: unknown) { + if (error instanceof Error) { + return error; + } + if (typeof error === "string") { + return new Error(error); + } + return new Error("An unknown error occurred"); +} From e3b85e00563e989ce7e209aa3adba8ec09d936d4 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Sun, 24 Mar 2024 15:06:52 +0100 Subject: [PATCH 041/147] feat(hooks-ui): create `useOperationInProgressCount` hook --- .../src/hooks/useOperationInProgressCount.ts | 51 +++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 webapp/src/hooks/useOperationInProgressCount.ts diff --git a/webapp/src/hooks/useOperationInProgressCount.ts b/webapp/src/hooks/useOperationInProgressCount.ts new file mode 100644 index 0000000000..bc71fb677a --- /dev/null +++ b/webapp/src/hooks/useOperationInProgressCount.ts @@ -0,0 +1,51 @@ +import { useMemo, useState } from "react"; +import * as R from "ramda"; + +/** + * Hook to tracks the number of CRUD operations in progress. + * + * @returns An object containing methods to increment, decrement, + * and retrieve the count of each operation type. + */ +function useOperationInProgressCount() { + const [opsInProgressCount, setOpsInProgressCount] = useState({ + create: 0, + read: 0, + update: 0, + delete: 0, + }); + + const makeOperationMethods = ( + operation: keyof typeof opsInProgressCount, + ) => ({ + increment: (number = 1) => { + setOpsInProgressCount((prev) => ({ + ...prev, + [operation]: prev[operation] + number, + })); + }, + decrement: (number = 1) => { + setOpsInProgressCount((prev) => ({ + ...prev, + [operation]: Math.max(prev[operation] - number, 0), + })); + }, + total: opsInProgressCount[operation], + }); + + const methods = useMemo( + () => ({ + createOps: makeOperationMethods("create"), + readOps: makeOperationMethods("read"), + updateOps: makeOperationMethods("update"), + deleteOps: makeOperationMethods("delete"), + totalOps: Object.values(opsInProgressCount).reduce(R.add, 0), + }), + // eslint-disable-next-line react-hooks/exhaustive-deps + [opsInProgressCount], + ); + + return methods; +} + +export default useOperationInProgressCount; From 5d322a1782eddc4e107bd7a76bbb234ef96f1eee Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Fri, 12 Apr 2024 11:07:48 +0200 Subject: [PATCH 042/147] fix(clusters-ui): issues with deletion --- webapp/public/locales/en/main.json | 1 + webapp/public/locales/fr/main.json | 1 + .../Modelization/Areas/Renewables/index.tsx | 5 ++- .../Modelization/Areas/Storages/index.tsx | 5 ++- .../Modelization/Areas/Thermal/index.tsx | 5 ++- .../common/GroupedDataTable/index.tsx | 37 +++++++++++++------ 6 files changed, 36 insertions(+), 18 deletions(-) diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 4902a37596..251ea6b36a 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -82,6 +82,7 @@ "global.error.failedtoretrievejobs": "Failed to retrieve job information", "global.error.failedtoretrievelogs": "Failed to retrieve job logs", "global.error.failedtoretrievedownloads": "Failed to retrieve downloads list", + "global.error.delete": "Deletion failed", "global.area.add": "Add an area", "login.error": "Failed to authenticate", "tasks.title": "Tasks", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index 5b0f6481c7..1a9e434965 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json 
@@ -82,6 +82,7 @@ "global.error.failedtoretrievejobs": "Échec de la récupération des tâches", "global.error.failedtoretrievelogs": "Échec de la récupération des logs", "global.error.failedtoretrievedownloads": "Échec de la récupération des exports", + "global.error.delete": "La suppression a échoué", "global.area.add": "Ajouter une zone", "login.error": "Échec de l'authentification", "tasks.title": "Tâches", diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx index cd1a467e54..cfc2c726f5 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx @@ -114,7 +114,8 @@ function Renewables() { return createRenewableCluster(study.id, areaId, cluster); }; - const handleDeleteSelection = (ids: string[]) => { + const handleDelete = (rows: RenewableClusterWithCapacity[]) => { + const ids = rows.map((row) => row.id); return deleteRenewableClusters(study.id, areaId, ids); }; @@ -133,7 +134,7 @@ function Renewables() { columns={columns} groups={RENEWABLE_GROUPS} onCreate={handleCreateRow} - onDelete={handleDeleteSelection} + onDelete={handleDelete} onNameClick={handleNameClick} deleteConfirmationMessage={(count) => t("studies.modelization.clusters.question.delete", { count }) diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx index f6f417f3c8..f8fa054c18 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx @@ -160,7 +160,8 @@ function Storages() { return createStorage(study.id, areaId, storage); }; - const handleDeleteSelection = (ids: string[]) => { + const handleDelete = (rows: Storage[]) => { + const ids = rows.map((row) => row.id); return deleteStorages(study.id, areaId, ids); }; @@ -179,7 +180,7 @@ function Storages() { columns={columns} groups={STORAGE_GROUPS} onCreate={handleCreateRow} - onDelete={handleDeleteSelection} + onDelete={handleDelete} onNameClick={handleNameClick} deleteConfirmationMessage={(count) => t("studies.modelization.clusters.question.delete", { count }) diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx index 46bd508ae3..e410897db7 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx @@ -128,7 +128,8 @@ function Thermal() { return createThermalCluster(study.id, areaId, cluster); }; - const handleDeleteSelection = (ids: string[]) => { + const handleDelete = (rows: ThermalClusterWithCapacity[]) => { + const ids = rows.map((row) => row.id); return deleteThermalClusters(study.id, areaId, ids); }; @@ -147,7 +148,7 @@ function Thermal() { columns={columns} groups={THERMAL_GROUPS} onCreate={handleCreateRow} - onDelete={handleDeleteSelection} + onDelete={handleDelete} onNameClick={handleNameClick} deleteConfirmationMessage={(count) => t("studies.modelization.clusters.question.delete", { count }) diff --git a/webapp/src/components/common/GroupedDataTable/index.tsx 
b/webapp/src/components/common/GroupedDataTable/index.tsx index 936ab4dfe6..5b49b5ac57 100644 --- a/webapp/src/components/common/GroupedDataTable/index.tsx +++ b/webapp/src/components/common/GroupedDataTable/index.tsx @@ -25,6 +25,10 @@ import * as R from "ramda"; import * as RA from "ramda-adjunct"; import { usePrevious } from "react-use"; import useUpdateEffectOnce from "../../../hooks/useUpdateEffectOnce"; +import { PromiseAny } from "../../../utils/tsUtils"; +import useEnqueueErrorSnackbar from "../../../hooks/useEnqueueErrorSnackbar"; +import { toError } from "../../../utils/fnUtils"; +import useOperationInProgressCount from "../../../hooks/useOperationInProgressCount"; export interface GroupedDataTableProps { data: TData[]; @@ -32,7 +36,7 @@ export interface GroupedDataTableProps { columns: Array>; groups: string[] | readonly string[]; onCreate?: (values: TData) => Promise; - onDelete?: (ids: string[]) => void; + onDelete?: (rows: TData[]) => PromiseAny | void; onNameClick?: (row: MRT_Row) => void; isLoading?: boolean; deleteConfirmationMessage?: string | ((count: number) => string); @@ -60,9 +64,11 @@ function GroupedDataTable({ >(""); const [tableData, setTableData] = useState(data); const [rowSelection, setRowSelection] = useState({}); + const enqueueErrorSnackbar = useEnqueueErrorSnackbar(); // Allow to use the last version of `onNameClick` in `tableColumns` const callbacksRef = useAutoUpdateRef({ onNameClick }); const prevData = usePrevious(data); + const { deleteOps, totalOps } = useOperationInProgressCount(); // Update once `data` only if previous value was empty. // It allows to handle loading data. @@ -129,7 +135,7 @@ function GroupedDataTable({ expanded: true, columnPinning: { left: [GROUP_COLUMN_ID] }, }, - state: { isLoading, rowSelection }, + state: { isLoading, isSaving: totalOps > 0, rowSelection }, enableGrouping: true, enableStickyFooter: true, enableStickyHeader: true, @@ -235,24 +241,31 @@ function GroupedDataTable({ } }; - const handleDelete = () => { + const handleDelete = async () => { + closeDialog(); + if (!onDelete) { return; } - const rowIndexes = Object.keys(rowSelection) - .map(Number) - // ignore groups names - .filter(Number.isInteger); + setRowSelection({}); - const rowIdsToDelete = rowIndexes.map((index) => tableData[index].id); + const rowsToDelete = selectedRows; - onDelete(rowIdsToDelete); setTableData((prevTableData) => - prevTableData.filter((row) => !rowIdsToDelete.includes(row.id)), + prevTableData.filter((row) => !rowsToDelete.includes(row)), ); - setRowSelection({}); - closeDialog(); + + deleteOps.increment(); + + try { + await onDelete(rowsToDelete); + } catch (error) { + enqueueErrorSnackbar(t("global.error.delete"), toError(error)); + setTableData((prevTableData) => [...prevTableData, ...rowsToDelete]); + } + + deleteOps.decrement(); }; const handleDuplicate = async (name: string) => { From e8cd11e26b6f5fe0250fe1a146de00c89f34eed9 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Fri, 12 Apr 2024 12:20:52 +0200 Subject: [PATCH 043/147] feat(common-ui,clusters-ui): implement optimistic row creation * fix TS mistakes * update i18n --- webapp/public/locales/en/main.json | 4 +- webapp/public/locales/fr/main.json | 4 +- .../Modelization/Areas/Renewables/Fields.tsx | 2 +- .../Modelization/Areas/Renewables/index.tsx | 21 ++- .../Modelization/Areas/Renewables/utils.ts | 12 +- .../Modelization/Areas/Storages/Fields.tsx | 2 +- .../Modelization/Areas/Storages/index.tsx | 10 +- 
.../Modelization/Areas/Storages/utils.ts | 7 +- .../Modelization/Areas/Thermal/Fields.tsx | 2 +- .../Modelization/Areas/Thermal/index.tsx | 21 ++- .../Modelization/Areas/Thermal/utils.ts | 12 +- .../Modelization/Areas/common/utils.ts | 27 +-- .../common/GroupedDataTable/CreateDialog.tsx | 46 ++--- .../common/GroupedDataTable/index.tsx | 167 +++++++++++++----- .../common/GroupedDataTable/types.ts | 4 + .../common/GroupedDataTable/utils.ts | 26 +-- webapp/src/utils/validationUtils.ts | 2 +- 17 files changed, 215 insertions(+), 154 deletions(-) create mode 100644 webapp/src/components/common/GroupedDataTable/types.ts diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 251ea6b36a..f3d8e241a9 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -82,6 +82,7 @@ "global.error.failedtoretrievejobs": "Failed to retrieve job information", "global.error.failedtoretrievelogs": "Failed to retrieve job logs", "global.error.failedtoretrievedownloads": "Failed to retrieve downloads list", + "global.error.create": "Creation failed", "global.error.delete": "Deletion failed", "global.area.add": "Add an area", "login.error": "Failed to authenticate", @@ -120,7 +121,7 @@ "form.submit.inProgress": "The form is being submitted. Are you sure you want to leave the page?", "form.asyncDefaultValues.error": "Failed to get values", "form.field.required": "Field required", - "form.field.duplicate": "Value already exists: {{0}}", + "form.field.duplicate": "Value already exists", "form.field.minLength": "{{0}} character(s) minimum", "form.field.minValue": "The minimum value is {{0}}", "form.field.maxValue": "The maximum value is {{0}}", @@ -489,7 +490,6 @@ "study.modelization.clusters.matrix.timeSeries": "Time-Series", "study.modelization.clusters.backClusterList": "Back to cluster list", "study.modelization.clusters.tsInterpretation": "TS interpretation", - "study.modelization.clusters.group": "Group", "studies.modelization.clusters.question.delete_one": "Are you sure you want to delete this cluster?", "studies.modelization.clusters.question.delete_other": "Are you sure you want to delete these {{count}} clusters?", "study.modelization.bindingConst.comments": "Comments", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index 1a9e434965..bec0e911cc 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -82,6 +82,7 @@ "global.error.failedtoretrievejobs": "Échec de la récupération des tâches", "global.error.failedtoretrievelogs": "Échec de la récupération des logs", "global.error.failedtoretrievedownloads": "Échec de la récupération des exports", + "global.error.create": "La création a échoué", "global.error.delete": "La suppression a échoué", "global.area.add": "Ajouter une zone", "login.error": "Échec de l'authentification", @@ -120,7 +121,7 @@ "form.submit.inProgress": "Le formulaire est en cours de soumission. 
Etes-vous sûr de vouloir quitter la page ?", "form.asyncDefaultValues.error": "Impossible d'obtenir les valeurs", "form.field.required": "Champ requis", - "form.field.duplicate": "Cette valeur existe déjà: {{0}}", + "form.field.duplicate": "Cette valeur existe déjà", "form.field.minLength": "{{0}} caractère(s) minimum", "form.field.minValue": "La valeur minimum est {{0}}", "form.field.maxValue": "La valeur maximum est {{0}}", @@ -489,7 +490,6 @@ "study.modelization.clusters.matrix.timeSeries": "Séries temporelles", "study.modelization.clusters.backClusterList": "Retour à la liste des clusters", "study.modelization.clusters.tsInterpretation": "TS interpretation", - "study.modelization.clusters.group": "Groupes", "studies.modelization.clusters.question.delete_one": "Êtes-vous sûr de vouloir supprimer ce cluster ?", "studies.modelization.clusters.question.delete_other": "Êtes-vous sûr de vouloir supprimer ces {{count}} clusters ?", "study.modelization.bindingConst.comments": "Commentaires", diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/Fields.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/Fields.tsx index 16e45f7c53..7b7ea9774c 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/Fields.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/Fields.tsx @@ -29,7 +29,7 @@ function Fields() { disabled /> (); @@ -86,8 +89,8 @@ function Renewables() { ), Cell: ({ row }) => ( <> - {Math.floor(row.original.enabledCapacity ?? 0)} /{" "} - {Math.floor(row.original.installedCapacity ?? 0)} + {Math.floor(row.original.enabledCapacity)} /{" "} + {Math.floor(row.original.installedCapacity)} ), Footer: () => ( @@ -105,13 +108,9 @@ function Renewables() { // Event handlers //////////////////////////////////////////////////////////////// - const handleCreateRow = ({ - id, - installedCapacity, - enabledCapacity, - ...cluster - }: RenewableClusterWithCapacity) => { - return createRenewableCluster(study.id, areaId, cluster); + const handleCreate = async (values: TRow) => { + const cluster = await createRenewableCluster(study.id, areaId, values); + return addCapacity(cluster); }; const handleDelete = (rows: RenewableClusterWithCapacity[]) => { @@ -132,8 +131,8 @@ function Renewables() { isLoading={isLoading} data={clustersWithCapacity} columns={columns} - groups={RENEWABLE_GROUPS} - onCreate={handleCreateRow} + groups={[...RENEWABLE_GROUPS]} + onCreate={handleCreate} onDelete={handleDelete} onNameClick={handleNameClick} deleteConfirmationMessage={(count) => diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/utils.ts index 074a19c84f..9ac4a6eb02 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/utils.ts +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/utils.ts @@ -4,6 +4,7 @@ import { StudyMetadata, } from "../../../../../../../common/types"; import client from "../../../../../../../services/api/client"; +import type { PartialExceptFor } from "../../../../../../../utils/tsUtils"; //////////////////////////////////////////////////////////////// // Constants @@ -30,8 +31,9 @@ export const TS_INTERPRETATION_OPTIONS = [ // Types //////////////////////////////////////////////////////////////// +export type RenewableGroup = (typeof RENEWABLE_GROUPS)[number]; + type 
TimeSeriesInterpretation = (typeof TS_INTERPRETATION_OPTIONS)[number]; -type RenewableGroup = (typeof RENEWABLE_GROUPS)[number]; export interface RenewableFormFields { name: string; @@ -115,12 +117,12 @@ export async function updateRenewableCluster( ); } -export async function createRenewableCluster( +export function createRenewableCluster( studyId: StudyMetadata["id"], areaId: Area["name"], - data: Partial, -): Promise { - return makeRequest( + data: PartialExceptFor, +) { + return makeRequest( "post", getClustersUrl(studyId, areaId), data, diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Fields.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Fields.tsx index 8485fd29e6..9d6935b3fe 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Fields.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Fields.tsx @@ -25,7 +25,7 @@ function Fields() { disabled /> (); @@ -156,8 +158,8 @@ function Storages() { // Event handlers //////////////////////////////////////////////////////////////// - const handleCreateRow = ({ id, ...storage }: Storage) => { - return createStorage(study.id, areaId, storage); + const handleCreate = (values: TRow) => { + return createStorage(study.id, areaId, values); }; const handleDelete = (rows: Storage[]) => { @@ -178,8 +180,8 @@ function Storages() { isLoading={isLoading} data={storages || []} columns={columns} - groups={STORAGE_GROUPS} - onCreate={handleCreateRow} + groups={[...STORAGE_GROUPS]} + onCreate={handleCreate} onDelete={handleDelete} onNameClick={handleNameClick} deleteConfirmationMessage={(count) => diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts index 1226bcac66..6855ff566c 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts @@ -1,5 +1,6 @@ import { StudyMetadata, Area } from "../../../../../../../common/types"; import client from "../../../../../../../services/api/client"; +import type { PartialExceptFor } from "../../../../../../../utils/tsUtils"; //////////////////////////////////////////////////////////////// // Constants @@ -87,11 +88,11 @@ export async function updateStorage( ); } -export async function createStorage( +export function createStorage( studyId: StudyMetadata["id"], areaId: Area["name"], - data: Partial, -): Promise { + data: PartialExceptFor, +) { return makeRequest("post", getStoragesUrl(studyId, areaId), data); } diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx index cf5cb2fc66..ec5d6fc632 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx @@ -35,7 +35,7 @@ function Fields() { disabled /> (); @@ -95,8 +98,8 @@ function Thermal() { ), Cell: ({ row }) => ( <> - {Math.floor(row.original.enabledCapacity ?? 0)} /{" "} - {Math.floor(row.original.installedCapacity ?? 
0)} + {Math.floor(row.original.enabledCapacity)} /{" "} + {Math.floor(row.original.installedCapacity)} ), Footer: () => ( @@ -119,13 +122,9 @@ function Thermal() { // Event handlers //////////////////////////////////////////////////////////////// - const handleCreateRow = ({ - id, - installedCapacity, - enabledCapacity, - ...cluster - }: ThermalClusterWithCapacity) => { - return createThermalCluster(study.id, areaId, cluster); + const handleCreate = async (values: TRow) => { + const cluster = await createThermalCluster(study.id, areaId, values); + return addCapacity(cluster); }; const handleDelete = (rows: ThermalClusterWithCapacity[]) => { @@ -146,8 +145,8 @@ function Thermal() { isLoading={isLoading} data={clustersWithCapacity} columns={columns} - groups={THERMAL_GROUPS} - onCreate={handleCreateRow} + groups={[...THERMAL_GROUPS]} + onCreate={handleCreate} onDelete={handleDelete} onNameClick={handleNameClick} deleteConfirmationMessage={(count) => diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts index d113e06c4f..730ce37db2 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts @@ -4,6 +4,7 @@ import { StudyMetadata, } from "../../../../../../../common/types"; import client from "../../../../../../../services/api/client"; +import type { PartialExceptFor } from "../../../../../../../utils/tsUtils"; //////////////////////////////////////////////////////////////// // Constants @@ -51,7 +52,8 @@ export const TS_LAW_OPTIONS = ["geometric", "uniform"] as const; // Types //////////////////////////////////////////////////////////////// -type ThermalGroup = (typeof THERMAL_GROUPS)[number]; +export type ThermalGroup = (typeof THERMAL_GROUPS)[number]; + type LocalTSGenerationBehavior = (typeof TS_GENERATION_OPTIONS)[number]; type TimeSeriesLawOption = (typeof TS_LAW_OPTIONS)[number]; @@ -143,12 +145,12 @@ export async function updateThermalCluster( ); } -export async function createThermalCluster( +export function createThermalCluster( studyId: StudyMetadata["id"], areaId: Area["name"], - data: Partial, -): Promise { - return makeRequest( + data: PartialExceptFor, +) { + return makeRequest( "post", getClustersUrl(studyId, areaId), data, diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/utils.ts index 1128a0fc60..95169a6f77 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/utils.ts +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/utils.ts @@ -81,23 +81,17 @@ export const useClusterDataWithCapacity = ( }); const clustersWithCapacity: Array> = useMemo( - () => - clusters?.map((cluster) => { - const { unitCount, nominalCapacity, enabled } = cluster; - const installedCapacity = unitCount * nominalCapacity; - const enabledCapacity = enabled ? 
installedCapacity : 0; - return { ...cluster, installedCapacity, enabledCapacity }; - }) || [], + () => clusters?.map(addCapacity) || [], [clusters], ); const { totalUnitCount, totalInstalledCapacity, totalEnabledCapacity } = useMemo(() => { return clustersWithCapacity.reduce( - (acc, { unitCount, nominalCapacity, enabled }) => { + (acc, { unitCount, installedCapacity, enabledCapacity }) => { acc.totalUnitCount += unitCount; - acc.totalInstalledCapacity += unitCount * nominalCapacity; - acc.totalEnabledCapacity += enabled ? unitCount * nominalCapacity : 0; + acc.totalInstalledCapacity += installedCapacity; + acc.totalEnabledCapacity += enabledCapacity; return acc; }, { @@ -116,3 +110,16 @@ export const useClusterDataWithCapacity = ( isLoading, }; }; + +/** + * Adds the installed and enabled capacity fields to a cluster. + * + * @param cluster - The cluster to add the capacity fields to. + * @returns The cluster with the installed and enabled capacity fields added. + */ +export function addCapacity(cluster: T) { + const { unitCount, nominalCapacity, enabled } = cluster; + const installedCapacity = unitCount * nominalCapacity; + const enabledCapacity = enabled ? installedCapacity : 0; + return { ...cluster, installedCapacity, enabledCapacity }; +} diff --git a/webapp/src/components/common/GroupedDataTable/CreateDialog.tsx b/webapp/src/components/common/GroupedDataTable/CreateDialog.tsx index 5c8313a352..9df4f4e2c5 100644 --- a/webapp/src/components/common/GroupedDataTable/CreateDialog.tsx +++ b/webapp/src/components/common/GroupedDataTable/CreateDialog.tsx @@ -1,48 +1,38 @@ -import { t } from "i18next"; import AddCircleIcon from "@mui/icons-material/AddCircle"; import FormDialog from "../dialogs/FormDialog"; import StringFE from "../fieldEditors/StringFE"; import Fieldset from "../Fieldset"; import { SubmitHandlerPlus } from "../Form/types"; import SelectFE from "../fieldEditors/SelectFE"; -import { nameToId } from "../../../services/utils"; -import { TRow } from "./utils"; import { validateString } from "../../../utils/validationUtils"; +import type { TRow } from "./types"; +import { useTranslation } from "react-i18next"; -interface Props { +interface Props { open: boolean; onClose: VoidFunction; - onSubmit: (values: TData) => Promise; - groups: string[] | readonly string[]; - existingNames: Array; + onSubmit: (values: TRow) => Promise; + groups: string[]; + existingNames: Array; } -const defaultValues = { - name: "", - group: "", -}; - -function CreateDialog({ +function CreateDialog({ open, onClose, onSubmit, groups, existingNames, -}: Props) { +}: Props) { + const { t } = useTranslation(); + //////////////////////////////////////////////////////////////// // Event Handlers //////////////////////////////////////////////////////////////// - const handleSubmit = async ({ - values, - }: SubmitHandlerPlus) => { - await onSubmit({ - ...values, - id: nameToId(values.name), - name: values.name.trim(), - } as TData); - - onClose(); + const handleSubmit = ({ + values: { name, group }, + }: SubmitHandlerPlus) => { + return onSubmit({ name: name.trim(), group }); }; //////////////////////////////////////////////////////////////// @@ -56,7 +46,6 @@ function CreateDialog({ open={open} onCancel={onClose} onSubmit={handleSubmit} - config={{ defaultValues }} > {({ control }) => (
@@ -72,14 +61,11 @@ function CreateDialog({ sx={{ m: 0 }} />
)} diff --git a/webapp/src/components/common/GroupedDataTable/index.tsx b/webapp/src/components/common/GroupedDataTable/index.tsx index 5b49b5ac57..01fbd9ca6b 100644 --- a/webapp/src/components/common/GroupedDataTable/index.tsx +++ b/webapp/src/components/common/GroupedDataTable/index.tsx @@ -3,7 +3,7 @@ import AddCircleOutlineIcon from "@mui/icons-material/AddCircleOutline"; import ContentCopyIcon from "@mui/icons-material/ContentCopy"; import DeleteOutlineIcon from "@mui/icons-material/DeleteOutline"; import DeleteIcon from "@mui/icons-material/Delete"; -import { Button } from "@mui/material"; +import { Button, Skeleton } from "@mui/material"; import { MaterialReactTable, MRT_ToggleFiltersButton, @@ -14,10 +14,10 @@ import { type MRT_Row, } from "material-react-table"; import { useTranslation } from "react-i18next"; -import { useMemo, useState } from "react"; +import { useMemo, useRef, useState } from "react"; import CreateDialog from "./CreateDialog"; import ConfirmationDialog from "../dialogs/ConfirmationDialog"; -import { TRow, generateUniqueValue, getTableOptionsForAlign } from "./utils"; +import { generateUniqueValue, getTableOptionsForAlign } from "./utils"; import DuplicateDialog from "./DuplicateDialog"; import { translateWithColon } from "../../../utils/i18nUtils"; import useAutoUpdateRef from "../../../hooks/useAutoUpdateRef"; @@ -29,13 +29,17 @@ import { PromiseAny } from "../../../utils/tsUtils"; import useEnqueueErrorSnackbar from "../../../hooks/useEnqueueErrorSnackbar"; import { toError } from "../../../utils/fnUtils"; import useOperationInProgressCount from "../../../hooks/useOperationInProgressCount"; +import type { TRow } from "./types"; -export interface GroupedDataTableProps { +export interface GroupedDataTableProps< + TGroups extends string[], + TData extends TRow, +> { data: TData[]; // eslint-disable-next-line @typescript-eslint/no-explicit-any columns: Array>; - groups: string[] | readonly string[]; - onCreate?: (values: TData) => Promise; + groups: TGroups; + onCreate?: (values: TRow) => Promise; onDelete?: (rows: TData[]) => PromiseAny | void; onNameClick?: (row: MRT_Row) => void; isLoading?: boolean; @@ -48,7 +52,10 @@ export interface GroupedDataTableProps { const GROUP_COLUMN_ID = "_group"; const NAME_COLUMN_ID = "_name"; -function GroupedDataTable({ +function GroupedDataTable< + TGroups extends string[], + TData extends TRow, +>({ data, columns, groups, @@ -57,7 +64,7 @@ function GroupedDataTable({ onNameClick, isLoading, deleteConfirmationMessage, -}: GroupedDataTableProps) { +}: GroupedDataTableProps) { const { t } = useTranslation(); const [openDialog, setOpenDialog] = useState< "add" | "duplicate" | "delete" | "" @@ -68,7 +75,8 @@ function GroupedDataTable({ // Allow to use the last version of `onNameClick` in `tableColumns` const callbacksRef = useAutoUpdateRef({ onNameClick }); const prevData = usePrevious(data); - const { deleteOps, totalOps } = useOperationInProgressCount(); + const pendingRows = useRef>>([]); + const { createOps, deleteOps, totalOps } = useOperationInProgressCount(); // Update once `data` only if previous value was empty. // It allows to handle loading data. 
@@ -104,23 +112,52 @@ function GroupedDataTable({ filterSelectOptions: existingNames, Cell: callbacksRef.current.onNameClick && - (({ renderedCellValue, row }) => ( - callbacksRef.current.onNameClick?.(row)} - > - {renderedCellValue} - - )), + (({ renderedCellValue, row }) => { + if (isPendingRow(row.original)) { + return renderedCellValue; + } + + return ( + callbacksRef.current.onNameClick?.(row)} + > + {renderedCellValue} + + ); + }), ...getTableOptionsForAlign("left"), }, - ...columns, + ...columns.map( + (column) => + ({ + ...column, + Cell: (props) => { + const { row, renderedCellValue } = props; + // Use JSX instead of call it directly to remove React warning: + // 'Warning: Internal React error: Expected static flag was missing.' + const CellComp = column.Cell; + + if (isPendingRow(row.original)) { + return ( + + ); + } + + return CellComp ? : renderedCellValue; + }, + }) as MRT_ColumnDef, + ), ], // eslint-disable-next-line react-hooks/exhaustive-deps [columns, t, ...groups], @@ -145,27 +182,35 @@ function GroupedDataTable({ enablePagination: false, positionToolbarAlertBanner: "none", // Rows - muiTableBodyRowProps: ({ row }) => ({ - onClick: () => { - const isGrouped = row.getIsGrouped(); - const rowIds = isGrouped - ? row.getLeafRows().map((r) => r.id) - : [row.id]; + muiTableBodyRowProps: ({ row }) => { + const isPending = isPendingRow(row.original); + + return { + onClick: () => { + if (isPending) { + return; + } + + const isGrouped = row.getIsGrouped(); + const rowIds = isGrouped + ? row.getLeafRows().map((r) => r.id) + : [row.id]; setRowSelection((prev) => { const newValue = isGrouped ? !rowIds.some((id) => prev[id]) // Select/Deselect all : !prev[row.id]; - return { - ...prev, - ...rowIds.reduce((acc, id) => ({ ...acc, [id]: newValue }), {}), - }; - }); - }, - selected: rowSelection[row.id], - sx: { cursor: "pointer" }, - }), + return { + ...prev, + ...rowIds.reduce((acc, id) => ({ ...acc, [id]: newValue }), {}), + }; + }); + }, + selected: rowSelection[row.id], + sx: { cursor: isPending ? "wait" : "pointer" }, + }; + }, // Toolbars renderTopToolbarCustomActions: ({ table }) => ( @@ -212,6 +257,7 @@ function GroupedDataTable({ ), onRowSelectionChange: setRowSelection, // Styles + muiTablePaperProps: { sx: { display: "flex", flexDirection: "column" } }, // Allow to have scroll ...R.mergeDeepRight(getTableOptionsForAlign("right"), { muiTableBodyCellProps: { sx: { borderBottom: "1px solid rgba(224, 224, 224, 0.3)" }, @@ -224,6 +270,25 @@ function GroupedDataTable({ .rows.map((row) => row.original); const selectedRow = selectedRows.length === 1 ? 
selectedRows[0] : null; + //////////////////////////////////////////////////////////////// + // Optimistic + //////////////////////////////////////////////////////////////// + + const addPendingRow = (row: TRow) => { + pendingRows.current.push(row); + // Type can be asserted as `TData` because the row will be checked in cell renders + setTableData((prev) => [...prev, row as TData]); + }; + + const removePendingRow = (row: TRow) => { + pendingRows.current = pendingRows.current.filter((r) => r !== row); + setTableData((prev) => prev.filter((r) => r !== row)); + }; + + function isPendingRow(row: TData) { + return pendingRows.current.includes(row); + } + //////////////////////////////////////////////////////////////// // Utils //////////////////////////////////////////////////////////////// @@ -234,11 +299,25 @@ function GroupedDataTable({ // Event Handlers //////////////////////////////////////////////////////////////// - const handleCreate = async (values: TData) => { - if (onCreate) { + const handleCreate = async (values: TRow) => { + closeDialog(); + + if (!onCreate) { + return; + } + + createOps.increment(); + addPendingRow(values); + + try { const newRow = await onCreate(values); - setTableData((prevTableData) => [...prevTableData, newRow]); + setTableData((prev) => [...prev, newRow]); + } catch (error) { + enqueueErrorSnackbar(t("global.error.create"), toError(error)); } + + removePendingRow(values); + createOps.decrement(); }; const handleDelete = async () => { @@ -273,7 +352,7 @@ function GroupedDataTable({ return; } - const id = generateUniqueValue("id", name, tableData); + const id = generateUniqueValue(name, tableData); const duplicatedRow = { ...selectedRow, @@ -310,7 +389,7 @@ function GroupedDataTable({ onClose={closeDialog} onSubmit={handleDuplicate} existingNames={existingNames} - defaultName={generateUniqueValue("name", selectedRow.name, tableData)} + defaultName={generateUniqueValue(selectedRow.name, tableData)} /> )} {openDialog === "delete" && ( diff --git a/webapp/src/components/common/GroupedDataTable/types.ts b/webapp/src/components/common/GroupedDataTable/types.ts new file mode 100644 index 0000000000..6f91852cb4 --- /dev/null +++ b/webapp/src/components/common/GroupedDataTable/types.ts @@ -0,0 +1,4 @@ +export interface TRow { + name: string; + group: T; +} diff --git a/webapp/src/components/common/GroupedDataTable/utils.ts b/webapp/src/components/common/GroupedDataTable/utils.ts index b861cfb83d..f209d83bae 100644 --- a/webapp/src/components/common/GroupedDataTable/utils.ts +++ b/webapp/src/components/common/GroupedDataTable/utils.ts @@ -1,16 +1,6 @@ import * as R from "ramda"; -import { nameToId } from "../../../services/utils"; import { TableCellProps } from "@mui/material"; - -//////////////////////////////////////////////////////////////// -// Types -//////////////////////////////////////////////////////////////// - -export interface TRow { - id: string; - name: string; - group: string; -} +import type { TRow } from "./types"; //////////////////////////////////////////////////////////////// // Functions @@ -59,26 +49,16 @@ export const generateNextValue = ( * * This function leverages `generateNextValue` to ensure the uniqueness of the value. * - * @param property - The property for which the unique value is generated, either "name" or "id". * @param originalValue - The original value of the specified property. * @param tableData - The existing table data to check against for ensuring uniqueness. * @returns A unique value for the specified property. 
*/ export const generateUniqueValue = ( - property: "name" | "id", originalValue: string, tableData: TRow[], ): string => { - let baseValue: string; - - if (property === "name") { - baseValue = `${originalValue} - copy`; - } else { - baseValue = nameToId(originalValue); - } - - const existingValues = tableData.map((row) => row[property]); - return generateNextValue(baseValue, existingValues); + const existingValues = tableData.map((row) => row.name); + return generateNextValue(`${originalValue} - copy`, existingValues); }; export function getTableOptionsForAlign(align: TableCellProps["align"]) { diff --git a/webapp/src/utils/validationUtils.ts b/webapp/src/utils/validationUtils.ts index 94f1f95c30..9af316cbba 100644 --- a/webapp/src/utils/validationUtils.ts +++ b/webapp/src/utils/validationUtils.ts @@ -99,7 +99,7 @@ export function validateString( // Check for duplication against existing values. if (existingValues.map(normalize).includes(comparisonValue)) { - return t("form.field.duplicate", { 0: value }); + return t("form.field.duplicate"); } // Check for inclusion in the list of excluded values. From 7e71aa8ee4293f39615f69d1cc277aee14b215aa Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Fri, 12 Apr 2024 11:17:15 +0200 Subject: [PATCH 044/147] feat(common-ui,clusters-ui): implement optimistic row duplication * use new endpoint to include matrices in duplication --- .../Modelization/Areas/Renewables/index.tsx | 16 ++++++ .../Modelization/Areas/Renewables/utils.ts | 19 ++++++- .../Modelization/Areas/Storages/index.tsx | 6 +++ .../Modelization/Areas/Storages/utils.ts | 19 ++++++- .../Modelization/Areas/Thermal/index.tsx | 16 ++++++ .../Modelization/Areas/Thermal/utils.ts | 19 ++++++- .../common/GroupedDataTable/index.tsx | 52 +++++++++++-------- 7 files changed, 120 insertions(+), 27 deletions(-) diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx index 971984be02..1e1055aa66 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx @@ -11,6 +11,7 @@ import { RenewableGroup, createRenewableCluster, deleteRenewableClusters, + duplicateRenewableCluster, getRenewableClusters, } from "./utils"; import useAppSelector from "../../../../../../../redux/hooks/useAppSelector"; @@ -113,6 +114,20 @@ function Renewables() { return addCapacity(cluster); }; + const handleDuplicate = async ( + row: RenewableClusterWithCapacity, + newName: string, + ) => { + const cluster = await duplicateRenewableCluster( + study.id, + areaId, + row.id, + newName, + ); + + return { ...row, ...cluster }; + }; + const handleDelete = (rows: RenewableClusterWithCapacity[]) => { const ids = rows.map((row) => row.id); return deleteRenewableClusters(study.id, areaId, ids); @@ -133,6 +148,7 @@ function Renewables() { columns={columns} groups={[...RENEWABLE_GROUPS]} onCreate={handleCreate} + onDuplicate={handleDuplicate} onDelete={handleDelete} onNameClick={handleNameClick} deleteConfirmationMessage={(count) => diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/utils.ts index 9ac4a6eb02..c20959e52b 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/utils.ts +++ 
b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/utils.ts @@ -77,9 +77,10 @@ const getClusterUrl = ( async function makeRequest( method: "get" | "post" | "patch" | "delete", url: string, - data?: Partial | { data: Array }, + data?: Partial | { data: Array } | null, + params?: Record, ): Promise { - const res = await client[method](url, data); + const res = await client[method](url, data, params && { params }); return res.data; } @@ -129,6 +130,20 @@ export function createRenewableCluster( ); } +export function duplicateRenewableCluster( + studyId: StudyMetadata["id"], + areaId: Area["name"], + sourceClusterId: RenewableCluster["id"], + newName: RenewableCluster["name"], +) { + return makeRequest( + "post", + `/v1/studies/${studyId}/areas/${areaId}/renewables/${sourceClusterId}`, + null, + { newName }, + ); +} + export function deleteRenewableClusters( studyId: StudyMetadata["id"], areaId: Area["name"], diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx index 66d188db3b..4cb41f145a 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx @@ -14,6 +14,7 @@ import { createStorage, STORAGE_GROUPS, StorageGroup, + duplicateStorage, } from "./utils"; import usePromiseWithSnackbarError from "../../../../../../../hooks/usePromiseWithSnackbarError"; import type { TRow } from "../../../../../../common/GroupedDataTable/types"; @@ -162,6 +163,10 @@ function Storages() { return createStorage(study.id, areaId, values); }; + const handleDuplicate = (row: Storage, newName: string) => { + return duplicateStorage(study.id, areaId, row.id, newName); + }; + const handleDelete = (rows: Storage[]) => { const ids = rows.map((row) => row.id); return deleteStorages(study.id, areaId, ids); @@ -182,6 +187,7 @@ function Storages() { columns={columns} groups={[...STORAGE_GROUPS]} onCreate={handleCreate} + onDuplicate={handleDuplicate} onDelete={handleDelete} onNameClick={handleNameClick} deleteConfirmationMessage={(count) => diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts index 6855ff566c..9709be679a 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts @@ -54,9 +54,10 @@ const getStorageUrl = ( async function makeRequest( method: "get" | "post" | "patch" | "delete", url: string, - data?: Partial | { data: Array }, + data?: Partial | { data: Array } | null, + params?: Record, ): Promise { - const res = await client[method](url, data); + const res = await client[method](url, data, params && { params }); return res.data; } @@ -96,6 +97,20 @@ export function createStorage( return makeRequest("post", getStoragesUrl(studyId, areaId), data); } +export function duplicateStorage( + studyId: StudyMetadata["id"], + areaId: Area["name"], + sourceClusterId: Storage["id"], + newName: Storage["name"], +) { + return makeRequest( + "post", + `/v1/studies/${studyId}/areas/${areaId}/storages/${sourceClusterId}`, + null, + { newName }, + ); +} + export function deleteStorages( studyId: StudyMetadata["id"], areaId: Area["name"], diff --git 
a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx index 19e1e0ec5a..f7742c30c9 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx @@ -12,6 +12,7 @@ import { THERMAL_GROUPS, ThermalCluster, ThermalGroup, + duplicateThermalCluster, } from "./utils"; import useAppSelector from "../../../../../../../redux/hooks/useAppSelector"; import { getCurrentAreaId } from "../../../../../../../redux/selectors"; @@ -127,6 +128,20 @@ function Thermal() { return addCapacity(cluster); }; + const handleDuplicate = async ( + row: ThermalClusterWithCapacity, + newName: string, + ) => { + const cluster = await duplicateThermalCluster( + study.id, + areaId, + row.id, + newName, + ); + + return { ...row, ...cluster }; + }; + const handleDelete = (rows: ThermalClusterWithCapacity[]) => { const ids = rows.map((row) => row.id); return deleteThermalClusters(study.id, areaId, ids); @@ -147,6 +162,7 @@ function Thermal() { columns={columns} groups={[...THERMAL_GROUPS]} onCreate={handleCreate} + onDuplicate={handleDuplicate} onDelete={handleDelete} onNameClick={handleNameClick} deleteConfirmationMessage={(count) => diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts index 730ce37db2..9bf625e360 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts @@ -108,9 +108,10 @@ const getClusterUrl = ( async function makeRequest( method: "get" | "post" | "patch" | "delete", url: string, - data?: Partial | { data: Array }, + data?: Partial | { data: Array } | null, + params?: Record, ): Promise { - const res = await client[method](url, data); + const res = await client[method](url, data, params && { params }); return res.data; } @@ -157,6 +158,20 @@ export function createThermalCluster( ); } +export function duplicateThermalCluster( + studyId: StudyMetadata["id"], + areaId: Area["name"], + sourceClusterId: ThermalCluster["id"], + newName: ThermalCluster["name"], +) { + return makeRequest( + "post", + `/v1/studies/${studyId}/areas/${areaId}/thermals/${sourceClusterId}`, + null, + { newName }, + ); +} + export function deleteThermalClusters( studyId: StudyMetadata["id"], areaId: Area["name"], diff --git a/webapp/src/components/common/GroupedDataTable/index.tsx b/webapp/src/components/common/GroupedDataTable/index.tsx index 01fbd9ca6b..24fb987128 100644 --- a/webapp/src/components/common/GroupedDataTable/index.tsx +++ b/webapp/src/components/common/GroupedDataTable/index.tsx @@ -40,6 +40,7 @@ export interface GroupedDataTableProps< columns: Array>; groups: TGroups; onCreate?: (values: TRow) => Promise; + onDuplicate?: (row: TData, newName: string) => Promise; onDelete?: (rows: TData[]) => PromiseAny | void; onNameClick?: (row: MRT_Row) => void; isLoading?: boolean; @@ -60,6 +61,7 @@ function GroupedDataTable< columns, groups, onCreate, + onDuplicate, onDelete, onNameClick, isLoading, @@ -224,7 +226,7 @@ function GroupedDataTable< {t("button.add")} )} - {onCreate && ( + {onDuplicate && ( + )} + {onDuplicate && ( + + )} + {onDelete && ( + + )} + + ), + renderToolbarInternalActions: ({ table }) => ( + <> + + + + ), + onRowSelectionChange: 
setRowSelection, + // Styles + muiTablePaperProps: { sx: { display: "flex", flexDirection: "column" } }, // Allow to have scroll + ...R.mergeDeepRight(getTableOptionsForAlign("right"), { + muiTableBodyCellProps: { + sx: { borderBottom: "1px solid rgba(224, 224, 224, 0.3)" }, + }, + }), + }); + + const selectedRows = table + .getSelectedRowModel() + .rows.map((row) => row.original); + const selectedRow = selectedRows.length === 1 ? selectedRows[0] : null; + + //////////////////////////////////////////////////////////////// + // Optimistic + //////////////////////////////////////////////////////////////// + + const addPendingRow = (row: TRow) => { + const pendingRow = fillPendingRow?.(row) || row; + + pendingRows.current.push(pendingRow); + + // Type can be asserted as `TData` because the row will be checked in cell renders + // and `fillPendingRow` allows to add needed data + setTableData((prev) => [...prev, pendingRow as TData]); + + return pendingRow; + }; + + const removePendingRow = (row: TRow) => { + if (isPendingRow(row)) { + pendingRows.current = pendingRows.current.filter((r) => r !== row); + setTableData((prev) => prev.filter((r) => r !== row)); + } + }; + + function isPendingRow(row: TRow) { + return pendingRows.current.includes(row); + } + //////////////////////////////////////////////////////////////// // Utils //////////////////////////////////////////////////////////////// @@ -74,51 +308,80 @@ function GroupedDataTable({ // Event Handlers //////////////////////////////////////////////////////////////// - const handleCreate = async (values: TData) => { - if (onCreate) { - const newRow = await onCreate(values); - setTableData((prevTableData) => [...prevTableData, newRow]); - } - }; + const handleCreate = async (values: TRow) => { + closeDialog(); - const handleDelete = () => { - if (!onDelete) { + if (!onCreate) { return; } - const rowIndexes = Object.keys(rowSelection) - .map(Number) - // ignore groups names - .filter(Number.isInteger); + createOps.increment(); + const pendingRow = addPendingRow(values); - const rowIdsToDelete = rowIndexes.map((index) => tableData[index].id); + try { + const newRow = await onCreate(values); + setTableData((prev) => [...prev, newRow]); + } catch (error) { + enqueueErrorSnackbar(t("global.error.create"), toError(error)); + } - onDelete(rowIdsToDelete); - setTableData((prevTableData) => - prevTableData.filter((row) => !rowIdsToDelete.includes(row.id)), - ); - setRowSelection({}); - closeDialog(); + removePendingRow(pendingRow); + createOps.decrement(); }; - const handleDuplicate = async (name: string) => { - if (!selectedRow) { + const handleDuplicate = async (newName: string) => { + closeDialog(); + + if (!onDuplicate || !selectedRow) { return; } - const id = generateUniqueValue("id", name, tableData); + setRowSelection({}); const duplicatedRow = { ...selectedRow, - id, - name, + name: newName, }; - if (onCreate) { - const newRow = await onCreate(duplicatedRow); - setTableData((prevTableData) => [...prevTableData, newRow]); - setRowSelection({}); + createOps.increment(); + const pendingRow = addPendingRow(duplicatedRow); + + try { + const newRow = await onDuplicate(selectedRow, newName); + setTableData((prev) => [...prev, newRow]); + } catch (error) { + enqueueErrorSnackbar(t("global.error.create"), toError(error)); } + + removePendingRow(pendingRow); + createOps.decrement(); + }; + + const handleDelete = async () => { + closeDialog(); + + if (!onDelete) { + return; + } + + setRowSelection({}); + + const rowsToDelete = selectedRows; + + 
setTableData((prevTableData) => + prevTableData.filter((row) => !rowsToDelete.includes(row)), + ); + + deleteOps.increment(); + + try { + await onDelete(rowsToDelete); + } catch (error) { + enqueueErrorSnackbar(t("global.error.delete"), toError(error)); + setTableData((prevTableData) => [...prevTableData, ...rowsToDelete]); + } + + deleteOps.decrement(); }; //////////////////////////////////////////////////////////////// @@ -127,106 +390,7 @@ function GroupedDataTable({ return ( <> - { - const handleRowClick = () => { - // prevent group rows to be selected - if (groupingColumnId === undefined) { - setRowSelection((prev) => ({ - ...prev, - [id]: !prev[id], - })); - } - }; - - return { - onClick: handleRowClick, - selected: rowSelection[id], - sx: { - cursor: "pointer", - }, - }; - }} - state={{ rowSelection }} - enableColumnDragging={false} - enableColumnActions={false} - positionToolbarAlertBanner="none" - enableBottomToolbar={false} - enableStickyFooter - enableStickyHeader - enablePagination={false} - renderTopToolbarCustomActions={() => ( - - {onCreate && ( - - )} - - {onDelete && ( - - )} - - )} - renderToolbarInternalActions={({ table }) => ( - <> - - - - )} - muiTableHeadCellProps={{ - align: "right", - }} - muiTableBodyCellProps={{ - align: "right", - sx: { - borderBottom: "1px solid rgba(224, 224, 224, 0.3)", - }, - }} - muiTableFooterCellProps={{ - align: "right", - }} - muiTablePaperProps={{ - sx: { - width: 1, - display: "flex", - flexDirection: "column", - overflow: "auto", - }, - }} - /> + {openDialog === "add" && ( ({ onClose={closeDialog} onSubmit={handleDuplicate} existingNames={existingNames} - defaultName={generateUniqueValue("name", selectedRow.name, tableData)} + defaultName={generateUniqueValue(selectedRow.name, tableData)} /> )} {openDialog === "delete" && ( @@ -254,7 +418,9 @@ function GroupedDataTable({ onConfirm={handleDelete} alert="warning" > - {t("studies.modelization.clusters.question.delete")} + {RA.isFunction(deleteConfirmationMessage) + ? deleteConfirmationMessage(selectedRows.length) + : deleteConfirmationMessage ?? t("dialog.message.confirmDelete")} )} diff --git a/webapp/src/components/common/GroupedDataTable/types.ts b/webapp/src/components/common/GroupedDataTable/types.ts new file mode 100644 index 0000000000..6f91852cb4 --- /dev/null +++ b/webapp/src/components/common/GroupedDataTable/types.ts @@ -0,0 +1,4 @@ +export interface TRow { + name: string; + group: T; +} diff --git a/webapp/src/components/common/GroupedDataTable/utils.ts b/webapp/src/components/common/GroupedDataTable/utils.ts index aad96a3784..82673c3877 100644 --- a/webapp/src/components/common/GroupedDataTable/utils.ts +++ b/webapp/src/components/common/GroupedDataTable/utils.ts @@ -1,15 +1,6 @@ import * as R from "ramda"; -import { nameToId } from "../../../services/utils"; - -//////////////////////////////////////////////////////////////// -// Types -//////////////////////////////////////////////////////////////// - -export interface TRow { - id: string; - name: string; - group: string; -} +import { TableCellProps } from "@mui/material"; +import type { TRow } from "./types"; //////////////////////////////////////////////////////////////// // Functions @@ -58,24 +49,22 @@ export const generateNextValue = ( * * This function leverages generateNextValue to ensure the uniqueness of the value. * - * @param {"name" | "id"} property - The property for which the unique value is generated. - * @param {string} originalValue - The original value of the specified property. 
- * @param {TRow[]} tableData - The existing table data to check against. - * @returns {string} A unique value for the specified property. + * @param originalValue - The original value of the specified property. + * @param tableData - The existing table data to check against for ensuring uniqueness. + * @returns A unique value for the specified property. */ export const generateUniqueValue = ( - property: "name" | "id", originalValue: string, tableData: TRow[], ): string => { - let baseValue: string; - - if (property === "name") { - baseValue = `${originalValue} - copy`; - } else { - baseValue = nameToId(originalValue); - } - - const existingValues = tableData.map((row) => row[property]); - return generateNextValue(baseValue, existingValues); + const existingValues = tableData.map((row) => row.name); + return generateNextValue(`${originalValue} - copy`, existingValues); }; + +export function getTableOptionsForAlign(align: TableCellProps["align"]) { + return { + muiTableHeadCellProps: { align }, + muiTableBodyCellProps: { align }, + muiTableFooterCellProps: { align }, + }; +} diff --git a/webapp/src/hooks/useOperationInProgressCount.ts b/webapp/src/hooks/useOperationInProgressCount.ts new file mode 100644 index 0000000000..bc71fb677a --- /dev/null +++ b/webapp/src/hooks/useOperationInProgressCount.ts @@ -0,0 +1,51 @@ +import { useMemo, useState } from "react"; +import * as R from "ramda"; + +/** + * Hook to tracks the number of CRUD operations in progress. + * + * @returns An object containing methods to increment, decrement, + * and retrieve the count of each operation type. + */ +function useOperationInProgressCount() { + const [opsInProgressCount, setOpsInProgressCount] = useState({ + create: 0, + read: 0, + update: 0, + delete: 0, + }); + + const makeOperationMethods = ( + operation: keyof typeof opsInProgressCount, + ) => ({ + increment: (number = 1) => { + setOpsInProgressCount((prev) => ({ + ...prev, + [operation]: prev[operation] + number, + })); + }, + decrement: (number = 1) => { + setOpsInProgressCount((prev) => ({ + ...prev, + [operation]: Math.max(prev[operation] - number, 0), + })); + }, + total: opsInProgressCount[operation], + }); + + const methods = useMemo( + () => ({ + createOps: makeOperationMethods("create"), + readOps: makeOperationMethods("read"), + updateOps: makeOperationMethods("update"), + deleteOps: makeOperationMethods("delete"), + totalOps: Object.values(opsInProgressCount).reduce(R.add, 0), + }), + // eslint-disable-next-line react-hooks/exhaustive-deps + [opsInProgressCount], + ); + + return methods; +} + +export default useOperationInProgressCount; diff --git a/webapp/src/hooks/useUpdateEffectOnce.ts b/webapp/src/hooks/useUpdateEffectOnce.ts new file mode 100644 index 0000000000..61fedd115e --- /dev/null +++ b/webapp/src/hooks/useUpdateEffectOnce.ts @@ -0,0 +1,23 @@ +import { useEffect, useRef } from "react"; +import { useUpdateEffect } from "react-use"; + +/** + * Hook that runs the effect only at the first dependencies update. + * It behaves like the `useEffect` hook, but it skips the initial run, + * and the runs following the first update. + * + * @param effect - The effect function to run. + * @param deps - An array of dependencies to watch for changes. 
+ */ +const useUpdateEffectOnce: typeof useEffect = (effect, deps) => { + const hasUpdated = useRef(false); + + useUpdateEffect(() => { + if (!hasUpdated.current) { + hasUpdated.current = true; + return effect(); + } + }, deps); +}; + +export default useUpdateEffectOnce; diff --git a/webapp/src/i18n.ts b/webapp/src/i18n.ts index d1d95a0574..980cffbf89 100644 --- a/webapp/src/i18n.ts +++ b/webapp/src/i18n.ts @@ -2,34 +2,35 @@ import i18n from "i18next"; import Backend from "i18next-http-backend"; import LanguageDetector from "i18next-browser-languagedetector"; import { initReactI18next } from "react-i18next"; +import { version } from "../package.json"; -export default function i18nInit(version = "unknown") { - i18n - // load translation using xhr -> see /public/locales - // learn more: https://github.com/i18next/i18next-xhr-backend - .use(Backend) - // detect user language - // learn more: https://github.com/i18next/i18next-browser-languageDetector - .use(LanguageDetector) - // pass the i18n instance to react-i18next. - .use(initReactI18next) - // init i18next - // for all options read: https://www.i18next.com/overview/configuration-options - .init({ - fallbackLng: "en", - backend: { - loadPath: `${ - import.meta.env.BASE_URL - }locales/{{lng}}/{{ns}}.json?v=${version}`, - }, - react: { - useSuspense: false, - }, - interpolation: { - escapeValue: false, // not needed for react as it escapes by default - }, - ns: ["main"], - defaultNS: "main", - returnNull: false, - }); -} +i18n + // load translation using xhr -> see /public/locales + // learn more: https://github.com/i18next/i18next-xhr-backend + .use(Backend) + // detect user language + // learn more: https://github.com/i18next/i18next-browser-languageDetector + .use(LanguageDetector) + // pass the i18n instance to react-i18next. + .use(initReactI18next) + // init i18next + // for all options read: https://www.i18next.com/overview/configuration-options + .init({ + fallbackLng: "en", + backend: { + loadPath: `${ + import.meta.env.BASE_URL + }locales/{{lng}}/{{ns}}.json?v=${version}`, + }, + react: { + useSuspense: false, + }, + interpolation: { + escapeValue: false, // not needed for react as it escapes by default + }, + ns: ["main"], + defaultNS: "main", + returnNull: false, + }); + +export default i18n; diff --git a/webapp/src/index.tsx b/webapp/src/index.tsx index 80dec85813..2c6792f7a8 100644 --- a/webapp/src/index.tsx +++ b/webapp/src/index.tsx @@ -1,7 +1,6 @@ import { createRoot } from "react-dom/client"; import { Provider } from "react-redux"; import { StyledEngineProvider } from "@mui/material"; -import i18nInit from "./i18n"; import "./index.css"; import App from "./components/App"; import { Config, initConfig } from "./services/config"; @@ -15,8 +14,6 @@ initConfig((config: Config) => { window.location.reload(); } - i18nInit(config.version.gitcommit); - const container = document.getElementById("root") as HTMLElement; const root = createRoot(container); diff --git a/webapp/src/utils/fnUtils.ts b/webapp/src/utils/fnUtils.ts index d232d83246..155078d711 100644 --- a/webapp/src/utils/fnUtils.ts +++ b/webapp/src/utils/fnUtils.ts @@ -4,3 +4,22 @@ export function voidFn(...args: TArgs) { // Do nothing } + +/** + * A utility function that converts an unknown value to an Error object. + * If the value is already an Error object, it is returned as is. + * If the value is a string, it is used as the message for the new Error object. + * If the value is anything else, a new Error object with a generic message is created. 
+ * + * @param error - The value to convert to an Error object. + * @returns An Error object. + */ +export function toError(error: unknown) { + if (error instanceof Error) { + return error; + } + if (typeof error === "string") { + return new Error(error); + } + return new Error("An unknown error occurred"); +} diff --git a/webapp/src/utils/i18nUtils.ts b/webapp/src/utils/i18nUtils.ts new file mode 100644 index 0000000000..c613deab68 --- /dev/null +++ b/webapp/src/utils/i18nUtils.ts @@ -0,0 +1,22 @@ +import i18n from "../i18n"; + +/** + * Gets the current language used in the application. + * + * @returns The current language. + */ +export function getCurrentLanguage() { + return i18n.language; +} + +/** + * Translates the given key and appends a colon (:) at the end + * with the appropriate spacing for the current language. + * + * @param key - The translation key. + * @returns The translated string with a colon (:) appended. + */ +export function translateWithColon(key: string): string { + const lang = i18n.language; + return `${i18n.t(key)}${lang.startsWith("fr") ? " " : ""}:`; +} diff --git a/webapp/src/utils/tsUtils.ts b/webapp/src/utils/tsUtils.ts index eb60713aa8..7acf6465a2 100644 --- a/webapp/src/utils/tsUtils.ts +++ b/webapp/src/utils/tsUtils.ts @@ -1,3 +1,16 @@ +import { O } from "ts-toolbelt"; + +/** + * Allow to use `any` with `Promise` type without disabling ESLint rule. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export type PromiseAny = Promise; + +/** + * Make all properties in T optional, except for those specified by K. + */ +export type PartialExceptFor = O.Required, K>; + export function tuple(...items: T): T { return items; } diff --git a/webapp/src/utils/validationUtils.ts b/webapp/src/utils/validationUtils.ts index 94f1f95c30..9af316cbba 100644 --- a/webapp/src/utils/validationUtils.ts +++ b/webapp/src/utils/validationUtils.ts @@ -99,7 +99,7 @@ export function validateString( // Check for duplication against existing values. if (existingValues.map(normalize).includes(comparisonValue)) { - return t("form.field.duplicate", { 0: value }); + return t("form.field.duplicate"); } // Check for inclusion in the list of excluded values. 
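
The two hooks introduced above (`useOperationInProgressCount` and `useUpdateEffectOnce`) are consumed by `GroupedDataTable`, whose wiring is only partially visible in this patch. The sketch below shows one plausible way to combine them; the surrounding hook, item shape and import paths are illustrative assumptions, only the two hook APIs come from the diff.

    import { useState } from "react";
    import useOperationInProgressCount from "./hooks/useOperationInProgressCount";
    import useUpdateEffectOnce from "./hooks/useUpdateEffectOnce";

    interface Item {
      name: string;
      group: string;
    }

    function useOptimisticItems(
      fetchedItems: Item[],
      isFetching: boolean,
      onDelete: (rows: Item[]) => Promise<void>,
    ) {
      const [items, setItems] = useState(fetchedItems);
      const { deleteOps, totalOps } = useOperationInProgressCount();

      // Copy the fetched data into local state once, at its first update,
      // so later optimistic edits are not overwritten by re-renders.
      useUpdateEffectOnce(() => setItems(fetchedItems), [fetchedItems]);

      const deleteItems = async (toDelete: Item[]) => {
        // Optimistic removal first, rollback if the request fails.
        setItems((prev) => prev.filter((item) => !toDelete.includes(item)));
        deleteOps.increment();
        try {
          await onDelete(toDelete);
        } catch {
          setItems((prev) => [...prev, ...toDelete]);
        }
        deleteOps.decrement();
      };

      // Any operation still in flight keeps the table in a "busy" state.
      return { items, deleteItems, isBusy: isFetching || totalOps > 0 };
    }
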
From e447eb15a287a83fc03363b4968a9dbcd7c0aa2b Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Fri, 19 Apr 2024 19:39:24 +0200 Subject: [PATCH 069/147] build: update CHANGELOG --- docs/CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 841e0ed153..3ab8134941 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -7,11 +7,15 @@ v2.16.8 (2024-04-19) ### Features * **clusters:** add new endpoint for clusters duplication [`#1972`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1972) +* **clusters (ui):** implement new duplication endpoint and optimistic update [`#1984`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1984) * **configuration:** turn Thematic Trimming variable names in upper case * **configuration (ui):** replace underscore with space in Thematic Trimming variable names [`#2010`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2010) +* **ui:** enhance and refactor validation across UI components [`#1956`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1956) ### Bug Fixes +* **clusters (ui):** totals are updated after a duplication and a deletion [`#1984`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1984) +* **clusters (ui):** issue with selecting and deleting rows [`#1984`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1984) * **st-storages (ui):** correction of incorrect wording between "withdrawal" and "injection" [`#1977`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1977) * **st-storages (ui):** change matrix titles [`#1994`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1994) * **st-storages:** use command when updating matrices [`#1971`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1971) From 84f9be13cbf27c34ae0145794fbbb836a83a5a0c Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Wed, 29 Nov 2023 10:11:08 +0100 Subject: [PATCH 070/147] feat(tablemode): update the manager and add thermal pollutants missing --- .../study/business/table_mode_management.py | 507 ++++++++++++------ .../filesystem/root/input/hydro/hydro_ini.py | 1 + tests/integration/test_integration.py | 28 +- 3 files changed, 360 insertions(+), 176 deletions(-) diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index 23d8674781..d96c70c269 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -1,13 +1,10 @@ -from typing import Any, Dict, List, Optional, TypedDict, Union - -from pydantic import StrictFloat -from pydantic.types import StrictBool, StrictInt, StrictStr - +from typing import Any, Dict, List, Optional, TypedDict, Union, cast +from pydantic import Field from antarest.study.business.areas.properties_management import AdequacyPatchMode from antarest.study.business.areas.renewable_management import TimeSeriesInterpretation from antarest.study.business.binding_constraint_management import BindingConstraintManager from antarest.study.business.enum_ignore_case import EnumIgnoreCase -from antarest.study.business.utils import FormFieldsBaseModel, execute_or_add_commands +from antarest.study.business.utils import FormFieldsBaseModel, execute_or_add_commands, AllOptionalMetaclass from antarest.study.common.default_values import FilteringOptions, LinkProperties, NodalOptimization from antarest.study.model import RawStudy from 
antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency @@ -18,12 +15,22 @@ from antarest.study.storage.variantstudy.model.command.update_binding_constraint import UpdateBindingConstraint from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig +AREA_PATH = "input/areas/{area}" +THERMAL_PATH = "input/thermal/areas" +LINK_GLOB_PATH = "input/links/{area1}/properties" +LINK_PATH = f"{LINK_GLOB_PATH}/{{area2}}" +THERMAL_CLUSTER_GLOB_PATH = "input/thermal/clusters/{area}/list" +THERMAL_CLUSTER_PATH = f"{THERMAL_CLUSTER_GLOB_PATH}/{{cluster}}" +RENEWABLE_CLUSTER_GLOB_PATH = "input/renewables/clusters/{area}/list" +RENEWABLE_CLUSTER_PATH = f"{RENEWABLE_CLUSTER_GLOB_PATH}/{{cluster}}" +BINDING_CONSTRAINT_PATH = "input/bindingconstraints/bindingconstraints" + class TableTemplateType(EnumIgnoreCase): AREA = "area" LINK = "link" - CLUSTER = "cluster" - RENEWABLE = "renewable" + THERMAL_CLUSTER = "thermal cluster" + RENEWABLE_CLUSTER = "renewable cluster" BINDING_CONSTRAINT = "binding constraint" @@ -48,71 +55,240 @@ class BindingConstraintOperator(EnumIgnoreCase): EQUAL = "equal" -class AreaColumns(FormFieldsBaseModel): +class AreaColumns(FormFieldsBaseModel, metaclass=AllOptionalMetaclass): # Optimization - Nodal optimization - non_dispatchable_power: Optional[StrictBool] - dispatchable_hydro_power: Optional[StrictBool] - other_dispatchable_power: Optional[StrictBool] - average_unsupplied_energy_cost: Optional[Union[StrictFloat, StrictInt]] - spread_unsupplied_energy_cost: Optional[Union[StrictFloat, StrictInt]] - average_spilled_energy_cost: Optional[Union[StrictFloat, StrictInt]] - spread_spilled_energy_cost: Optional[Union[StrictFloat, StrictInt]] + non_dispatchable_power: bool = Field( + default=NodalOptimization.NON_DISPATCHABLE_POWER, + path=f"{AREA_PATH}/optimization/nodal optimization/non-dispatchable-power", + ) + dispatchable_hydro_power: bool = Field( + default=NodalOptimization.DISPATCHABLE_HYDRO_POWER, + path=f"{AREA_PATH}/optimization/nodal optimization/dispatchable-hydro-power", + ) + other_dispatchable_power: bool = Field( + default=NodalOptimization.OTHER_DISPATCHABLE_POWER, + path=f"{AREA_PATH}/optimization/nodal optimization/other-dispatchable-power", + ) + average_unsupplied_energy_cost: float = Field( + default=NodalOptimization.SPREAD_UNSUPPLIED_ENERGY_COST, + path=f"{THERMAL_PATH}/unserverdenergycost/{{area}}", + ) + spread_unsupplied_energy_cost: float = Field( + default=NodalOptimization.SPREAD_UNSUPPLIED_ENERGY_COST, + path=f"{AREA_PATH}/optimization/nodal optimization/spread-unsupplied-energy-cost", + ) + average_spilled_energy_cost: float = Field( + default=NodalOptimization.SPREAD_SPILLED_ENERGY_COST, + path=f"{THERMAL_PATH}/spilledenergycost/{{area}}", + ) + spread_spilled_energy_cost: float = Field( + default=NodalOptimization.SPREAD_SPILLED_ENERGY_COST, + path=f"{AREA_PATH}/optimization/nodal optimization/spread-spilled-energy-cost", + ) # Optimization - Filtering - filter_synthesis: Optional[StrictStr] - filter_year_by_year: Optional[StrictStr] + filter_synthesis: str = Field( + default=FilteringOptions.FILTER_SYNTHESIS, + path=f"{AREA_PATH}/optimization/filtering/filter-synthesis", + ) + filter_year_by_year: str = Field( + default=FilteringOptions.FILTER_YEAR_BY_YEAR, + path=f"{AREA_PATH}/optimization/filtering/filter-year-by-year", + ) # Adequacy patch - adequacy_patch_mode: Optional[AdequacyPatchMode] - - -class LinkColumns(FormFieldsBaseModel): - hurdles_cost: 
Optional[StrictBool] - loop_flow: Optional[StrictBool] - use_phase_shifter: Optional[StrictBool] - transmission_capacities: Optional[TransmissionCapacity] - asset_type: Optional[AssetType] - link_style: Optional[StrictStr] - link_width: Optional[StrictInt] - display_comments: Optional[StrictBool] - filter_synthesis: Optional[StrictStr] - filter_year_by_year: Optional[StrictStr] - - -class ClusterColumns(FormFieldsBaseModel): - group: Optional[StrictStr] - enabled: Optional[StrictBool] - must_run: Optional[StrictBool] - unit_count: Optional[StrictInt] - nominal_capacity: Optional[StrictInt] - min_stable_power: Optional[StrictInt] - spinning: Optional[StrictInt] - min_up_time: Optional[StrictInt] - min_down_time: Optional[StrictInt] - co2: Optional[StrictInt] - marginal_cost: Optional[StrictInt] - fixed_cost: Optional[StrictInt] - startup_cost: Optional[StrictInt] - market_bid_cost: Optional[StrictInt] - spread_cost: Optional[StrictInt] - ts_gen: Optional[LocalTSGenerationBehavior] - volatility_forced: Optional[StrictInt] - volatility_planned: Optional[StrictInt] - law_forced: Optional[LawOption] - law_planned: Optional[LawOption] - - -class RenewableColumns(FormFieldsBaseModel): - group: Optional[StrictStr] - ts_interpretation: Optional[TimeSeriesInterpretation] - enabled: Optional[StrictBool] - unit_count: Optional[StrictInt] - nominal_capacity: Optional[StrictInt] - - -class BindingConstraintColumns(FormFieldsBaseModel): - type: Optional[BindingConstraintFrequency] - operator: Optional[BindingConstraintOperator] - enabled: Optional[StrictBool] - group: Optional[StrictStr] + adequacy_patch_mode: AdequacyPatchMode = Field( + default=AdequacyPatchMode.OUTSIDE.value, + path=f"{AREA_PATH}/adequacy_patch/adequacy-patch/adequacy-patch-mode", + ) + + +class LinkColumns(FormFieldsBaseModel, metaclass=AllOptionalMetaclass): + hurdles_cost: bool = Field(default=LinkProperties.HURDLES_COST, path=f"{LINK_PATH}/hurdles-cost") + loop_flow: bool = Field(default=LinkProperties.LOOP_FLOW, path=f"{LINK_PATH}/loop-flow") + use_phase_shifter: bool = Field( + default=LinkProperties.USE_PHASE_SHIFTER, + path=f"{LINK_PATH}/use-phase-shifter", + ) + transmission_capacities: TransmissionCapacity = Field( + default=LinkProperties.TRANSMISSION_CAPACITIES, + path=f"{LINK_PATH}/transmission-capacities", + ) + asset_type: AssetType = Field(default=LinkProperties.ASSET_TYPE, path=f"{LINK_PATH}/asset-type") + link_style: str = Field(default=LinkProperties.LINK_STYLE, path=f"{LINK_PATH}/link-style") + link_width: int = Field(default=LinkProperties.LINK_WIDTH, path=f"{LINK_PATH}/link-width") + display_comments: bool = Field( + default=LinkProperties.DISPLAY_COMMENTS, + path=f"{LINK_PATH}/display-comments", + ) + filter_synthesis: str = Field( + default=FilteringOptions.FILTER_SYNTHESIS, + path=f"{LINK_PATH}/filter-synthesis", + ) + filter_year_by_year: str = Field( + default=FilteringOptions.FILTER_YEAR_BY_YEAR, + path=f"{LINK_PATH}/filter-year-by-year", + ) + + +class ThermalClusterColumns(FormFieldsBaseModel, metaclass=AllOptionalMetaclass): + group: str = Field( + default="", + path=f"{THERMAL_CLUSTER_PATH}/group", + ) + enabled: bool = Field( + default=True, + path=f"{THERMAL_CLUSTER_PATH}/enabled", + ) + must_run: bool = Field( + default=False, + path=f"{THERMAL_CLUSTER_PATH}/must-run", + ) + unit_count: int = Field( + default=0, + path=f"{THERMAL_CLUSTER_PATH}/unitcount", + ) + nominal_capacity: int = Field( + default=0, + path=f"{THERMAL_CLUSTER_PATH}/nominalcapacity", + ) + min_stable_power: int = Field( + 
default=0, + path=f"{THERMAL_CLUSTER_PATH}/min-stable-power", + ) + spinning: int = Field( + default=0, + path=f"{THERMAL_CLUSTER_PATH}/spinning", + ) + min_up_time: int = Field( + default=1, + path=f"{THERMAL_CLUSTER_PATH}/min-up-time", + ) + min_down_time: int = Field( + default=1, + path=f"{THERMAL_CLUSTER_PATH}/min-down-time", + ) + marginal_cost: int = Field( + default=0, + path=f"{THERMAL_CLUSTER_PATH}/marginal-cost", + ) + fixed_cost: int = Field( + default=0, + path=f"{THERMAL_CLUSTER_PATH}/fixed-cost", + ) + startup_cost: int = Field( + default=0, + path=f"{THERMAL_CLUSTER_PATH}/startup-cost", + ) + market_bid_cost: int = Field( + default=0, + path=f"{THERMAL_CLUSTER_PATH}/market-bid-cost", + ) + spread_cost: int = Field( + default=0, + path=f"{THERMAL_CLUSTER_PATH}/spread-cost", + ) + ts_gen: LocalTSGenerationBehavior = Field( + default=LocalTSGenerationBehavior.USE_GLOBAL_PARAMETER.value, + path=f"{THERMAL_CLUSTER_PATH}/gen-ts", + ) + volatility_forced: int = Field( + default=0, + path=f"{THERMAL_CLUSTER_PATH}/volatility.forced", + ) + volatility_planned: int = Field( + default=0, + path=f"{THERMAL_CLUSTER_PATH}/volatility.planned", + ) + law_forced: LawOption = Field( + default=LawOption.UNIFORM.value, + path=f"{THERMAL_CLUSTER_PATH}/law.forced", + ) + law_planned: LawOption = Field( + default=LawOption.UNIFORM.value, + path=f"{THERMAL_CLUSTER_PATH}/law.planned", + ) + # Pollutants + co2: float = Field( + default=0.0, + path=f"{THERMAL_CLUSTER_PATH}/co2", + ) + so2: float = Field( + default=0.0, + path=f"{THERMAL_CLUSTER_PATH}/so2", + ) + nh3: float = Field( + default=0.0, + path=f"{THERMAL_CLUSTER_PATH}/nh3", + ) + nox: float = Field( + default=0.0, + path=f"{THERMAL_CLUSTER_PATH}/nox", + ) + nmvoc: float = Field( + default=0.0, + path=f"{THERMAL_CLUSTER_PATH}/nmvoc", + ) + pm25: float = Field( + default=0.0, + path=f"{THERMAL_CLUSTER_PATH}/pm2_5", + ) + pm5: float = Field( + default=0.0, + path=f"{THERMAL_CLUSTER_PATH}/pm5", + ) + pm10: float = Field( + default=0.0, + path=f"{THERMAL_CLUSTER_PATH}/pm10", + ) + op1: float = Field( + default=0.0, + path=f"{THERMAL_CLUSTER_PATH}/op1", + ) + op2: float = Field( + default=0.0, + path=f"{THERMAL_CLUSTER_PATH}/op2", + ) + op3: float = Field( + default=0.0, + path=f"{THERMAL_CLUSTER_PATH}/op3", + ) + op4: float = Field( + default=0.0, + path=f"{THERMAL_CLUSTER_PATH}/op4", + ) + op5: float = Field( + default=0.0, + path=f"{THERMAL_CLUSTER_PATH}/op5", + ) + + +class RenewableClusterColumns(FormFieldsBaseModel, metaclass=AllOptionalMetaclass): + group: str = Field(default="", path=f"{RENEWABLE_CLUSTER_PATH}/group") + ts_interpretation: TimeSeriesInterpretation = Field( + default=TimeSeriesInterpretation.POWER_GENERATION.value, + path=f"{RENEWABLE_CLUSTER_PATH}/ts-interpretation", + ) + enabled: bool = Field(default=True, path=f"{RENEWABLE_CLUSTER_PATH}/enabled") + unit_count: int = Field(default=0, path=f"{RENEWABLE_CLUSTER_PATH}/unitcount") + nominal_capacity: int = Field(default=0, path=f"{RENEWABLE_CLUSTER_PATH}/nominalcapacity") + + +class BindingConstraintColumns(FormFieldsBaseModel, metaclass=AllOptionalMetaclass): + type: BindingConstraintFrequency = Field( + default=BindingConstraintFrequency.HOURLY.value, + path=f"{BINDING_CONSTRAINT_PATH}/type", + ) + operator: BindingConstraintOperator = Field( + default=BindingConstraintOperator.LESS.value, + path=f"{BINDING_CONSTRAINT_PATH}/operator", + ) + enabled: bool = Field( + default=True, + path=f"{BINDING_CONSTRAINT_PATH}/enabled", + ) + group: Optional[str] = Field( + 
default="default", + path=f"{BINDING_CONSTRAINT_PATH}/group", + ) class ColumnInfo(TypedDict): @@ -126,7 +302,7 @@ class PathVars(TypedDict, total=False): # Link area1: str area2: str - # Cluster, Renewable + # Thermal cluster, Renewable cluster area: str cluster: str @@ -353,16 +529,16 @@ class PathVars(TypedDict, total=False): COLUMNS_MODELS_BY_TYPE = { TableTemplateType.AREA: AreaColumns, TableTemplateType.LINK: LinkColumns, - TableTemplateType.CLUSTER: ClusterColumns, - TableTemplateType.RENEWABLE: RenewableColumns, + TableTemplateType.THERMAL_CLUSTER: ThermalClusterColumns, + TableTemplateType.RENEWABLE_CLUSTER: RenewableClusterColumns, TableTemplateType.BINDING_CONSTRAINT: BindingConstraintColumns, } ColumnsModelTypes = Union[ AreaColumns, LinkColumns, - ClusterColumns, - RenewableColumns, + ThermalClusterColumns, + RenewableClusterColumns, BindingConstraintColumns, ] @@ -400,14 +576,91 @@ def _get_glob_object(file_study: FileStudy, table_type: TableTemplateType) -> Di info_map[area_id][field] = value return info_map - url = { - TableTemplateType.LINK: LINK_GLOB_PATH.format(area1="*").split("/"), - TableTemplateType.CLUSTER: CLUSTER_GLOB_PATH.format(area="*").split("/"), - TableTemplateType.RENEWABLE: RENEWABLE_GLOB_PATH.format(area="*").split("/"), - TableTemplateType.BINDING_CONSTRAINT: BINDING_CONSTRAINT_PATH.split("/"), - }[table_type] + if table_type == TableTemplateType.LINK: + return file_study.tree.get(LINK_GLOB_PATH.format(area1="*").split("/")) + if table_type == TableTemplateType.THERMAL_CLUSTER: + return file_study.tree.get(THERMAL_CLUSTER_GLOB_PATH.format(area="*").split("/")) + if table_type == TableTemplateType.RENEWABLE_CLUSTER: + return file_study.tree.get(RENEWABLE_CLUSTER_GLOB_PATH.format(area="*").split("/")) + if table_type == TableTemplateType.BINDING_CONSTRAINT: + return file_study.tree.get(BINDING_CONSTRAINT_PATH.split("/")) + + return {} + + +def _get_value(path: List[str], data: Dict[str, Any], default_value: Any) -> Any: + if len(path): + return _get_value(path[1:], data.get(path[0], {}), default_value) + return data if data != {} else default_value + + +def _get_relative_path( + table_type: TableTemplateType, + path: str, +) -> List[str]: + base_path = "" + path_arr = path.split("/") - return file_study.tree.get(url) + if table_type == TableTemplateType.AREA: + if path.startswith(THERMAL_PATH): + base_path = THERMAL_PATH + # Remove {area} + path_arr = path_arr[:-1] + else: + base_path = AREA_PATH + elif table_type == TableTemplateType.LINK: + base_path = LINK_PATH + elif table_type == TableTemplateType.THERMAL_CLUSTER: + base_path = THERMAL_CLUSTER_PATH + elif table_type == TableTemplateType.RENEWABLE_CLUSTER: + base_path = RENEWABLE_CLUSTER_PATH + elif table_type == TableTemplateType.BINDING_CONSTRAINT: + base_path = BINDING_CONSTRAINT_PATH + + return path_arr[len(base_path.split("/")) :] + + +def _get_column_path( + table_type: TableTemplateType, + column: str, + path_vars: PathVars, +) -> str: + columns_model = COLUMNS_MODELS_BY_TYPE[table_type] + path = cast(str, columns_model.__fields__[column].field_info.extra["path"]) + + if table_type == TableTemplateType.AREA: + return path.format(area=path_vars["id"]) + if table_type == TableTemplateType.LINK: + return path.format(area1=path_vars["area1"], area2=path_vars["area2"]) + if table_type in [ + TableTemplateType.THERMAL_CLUSTER, + TableTemplateType.RENEWABLE_CLUSTER, + ]: + return path.format(area=path_vars["area"], cluster=path_vars["cluster"]) + + return path + + +def _get_path_vars_from_key( + 
table_type: TableTemplateType, + key: str, +) -> PathVars: + if table_type in [ + TableTemplateType.AREA, + TableTemplateType.BINDING_CONSTRAINT, + ]: + return PathVars(id=key) + if table_type == TableTemplateType.LINK: + area1, area2 = [v.strip() for v in key.split("/")] + return PathVars(area1=area1, area2=area2) + if table_type in [ + TableTemplateType.THERMAL_CLUSTER, + TableTemplateType.RENEWABLE_CLUSTER, + ]: + area, cluster = [v.strip() for v in key.split("/")] + return PathVars(area=area, cluster=cluster) + + return PathVars() class TableModeManager: @@ -422,17 +675,13 @@ def get_table_data( ) -> Dict[str, ColumnsModelTypes]: file_study = self.storage_service.get_storage(study).get_raw(study) columns_model = COLUMNS_MODELS_BY_TYPE[table_type] - fields_info = FIELDS_INFO_BY_TYPE[table_type] glob_object = _get_glob_object(file_study, table_type) + schema_columns = columns_model.schema()["properties"] def get_column_value(col: str, data: Dict[str, Any]) -> Any: - f_info = fields_info[col] - relative_path = TableModeManager.__get_relative_path(f_info["path"], table_type) - return TableModeManager.__get_value( - relative_path, - data, - f_info["default_value"], - ) + schema = schema_columns[col] + relative_path = _get_relative_path(table_type, schema["path"]) + return _get_value(relative_path, data, schema["default"]) if table_type == TableTemplateType.AREA: return { @@ -466,7 +715,7 @@ def set_table_data( command_context = self.storage_service.variant_study_service.command_factory.command_context for key, columns in data.items(): - path_vars = TableModeManager.__get_path_vars_from_key(table_type, key) + path_vars = _get_path_vars_from_key(table_type, key) if table_type == TableTemplateType.BINDING_CONSTRAINT: file_study = self.storage_service.get_storage(study).get_raw(study) @@ -497,7 +746,7 @@ def set_table_data( if val is not None: commands.append( UpdateConfig( - target=TableModeManager.__get_column_path(table_type, path_vars, col), + target=_get_column_path(table_type, col, path_vars), data=val, command_context=command_context, ) @@ -506,77 +755,3 @@ def set_table_data( if commands: file_study = self.storage_service.get_storage(study).get_raw(study) execute_or_add_commands(study, file_study, commands, self.storage_service) - - @staticmethod - def __get_value(path: List[str], data: Dict[str, Any], default_value: Any) -> Any: - if len(path): - return TableModeManager.__get_value(path[1:], data.get(path[0], {}), default_value) - return data if data != {} else default_value - - @staticmethod - def __get_relative_path( - path: str, - table_type: TableTemplateType, - ) -> List[str]: - base_path = "" - path_arr = path.split("/") - - if table_type == TableTemplateType.AREA: - if path.startswith(THERMAL_PATH): - base_path = THERMAL_PATH - # Remove {area} - path_arr = path_arr[:-1] - else: - base_path = AREA_PATH - elif table_type == TableTemplateType.LINK: - base_path = LINK_PATH - elif table_type == TableTemplateType.CLUSTER: - base_path = CLUSTER_PATH - elif table_type == TableTemplateType.RENEWABLE: - base_path = RENEWABLE_PATH - elif table_type == TableTemplateType.BINDING_CONSTRAINT: - base_path = BINDING_CONSTRAINT_PATH - - return path_arr[len(base_path.split("/")) :] - - @staticmethod - def __get_column_path( - table_type: TableTemplateType, - path_vars: PathVars, - column: str, - ) -> str: - path = FIELDS_INFO_BY_TYPE[table_type][column]["path"] - - if table_type == TableTemplateType.AREA: - return path.format(area=path_vars["id"]) - if table_type == TableTemplateType.LINK: - 
return path.format(area1=path_vars["area1"], area2=path_vars["area2"]) - if table_type in [ - TableTemplateType.CLUSTER, - TableTemplateType.RENEWABLE, - ]: - return path.format(area=path_vars["area"], cluster=path_vars["cluster"]) - - return path - - @staticmethod - def __get_path_vars_from_key( - table_type: TableTemplateType, - key: str, - ) -> PathVars: - if table_type in [ - TableTemplateType.AREA, - TableTemplateType.BINDING_CONSTRAINT, - ]: - return PathVars(id=key) - if table_type == TableTemplateType.LINK: - area1, area2 = [v.strip() for v in key.split("/")] - return PathVars(area1=area1, area2=area2) - if table_type in [ - TableTemplateType.CLUSTER, - TableTemplateType.RENEWABLE, - ]: - area, cluster = [v.strip() for v in key.split("/")] - return PathVars(area=area, cluster=cluster) - - return PathVars() diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py index 95f48a48f3..9e07a32506 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py @@ -5,6 +5,7 @@ class InputHydroIni(IniFileNode): def __init__(self, context: ContextServer, config: FileStudyTreeConfig): + # TODO: missing "use heuristic", "follow load" and "reservoir capacity" sections = [ "inter-daily-breakdown", "intra-daily-modulation", diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index 940b4c785a..6504472543 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -1497,8 +1497,10 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: table_mode_url, headers=admin_headers, params={ - "table_type": TableTemplateType.CLUSTER.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.CLUSTER]), + "table_type": TableTemplateType.THERMAL_CLUSTER, + "columns": ",".join( + FIELDS_INFO_BY_TYPE[TableTemplateType.THERMAL_CLUSTER] + ), }, ) res_table_data_json = res_table_data.json() @@ -1553,7 +1555,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: table_mode_url, headers=admin_headers, params={ - "table_type": TableTemplateType.CLUSTER.value, + "table_type": TableTemplateType.THERMAL_CLUSTER, }, json={ "area 1 / cluster 1": { @@ -1572,8 +1574,10 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: table_mode_url, headers=admin_headers, params={ - "table_type": TableTemplateType.CLUSTER.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.CLUSTER]), + "table_type": TableTemplateType.THERMAL_CLUSTER, + "columns": ",".join( + FIELDS_INFO_BY_TYPE[TableTemplateType.THERMAL_CLUSTER] + ), }, ) res_table_data_json = res_table_data.json() @@ -1630,8 +1634,10 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: table_mode_url, headers=admin_headers, params={ - "table_type": TableTemplateType.RENEWABLE.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.RENEWABLE]), + "table_type": TableTemplateType.RENEWABLE_CLUSTER, + "columns": ",".join( + FIELDS_INFO_BY_TYPE[TableTemplateType.RENEWABLE_CLUSTER] + ), }, ) res_table_data_json = res_table_data.json() @@ -1656,7 +1662,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: table_mode_url, headers=admin_headers, params={ - "table_type": TableTemplateType.RENEWABLE.value, + "table_type": 
TableTemplateType.RENEWABLE_CLUSTER, }, json={ "area 1 / cluster renewable 1": { @@ -1673,8 +1679,10 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: table_mode_url, headers=admin_headers, params={ - "table_type": TableTemplateType.RENEWABLE.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.RENEWABLE]), + "table_type": TableTemplateType.RENEWABLE_CLUSTER, + "columns": ",".join( + FIELDS_INFO_BY_TYPE[TableTemplateType.RENEWABLE_CLUSTER] + ), }, ) res_table_data_json = res_table_data.json() From 7a69d0a712ea54b3aa45210ad2923bfcb90945de Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 12 Dec 2023 10:38:28 +0100 Subject: [PATCH 071/147] test(tablemode): refactor unit tests (WIP) --- .../study_data_blueprint/test_table_mode.py | 64 +++++++++++++++++++ tests/integration/test_integration.py | 1 - 2 files changed, 64 insertions(+), 1 deletion(-) create mode 100644 tests/integration/study_data_blueprint/test_table_mode.py diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py new file mode 100644 index 0000000000..61bede52ce --- /dev/null +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -0,0 +1,64 @@ +import re + +import numpy as np +import pytest +from starlette.testclient import TestClient + +from antarest.core.tasks.model import TaskStatus +from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id +from tests.integration.utils import wait_task_completion + + +@pytest.mark.unit_test +class TestTableMode: + """ + Test the end points related to the table mode. + + Those tests use the "examples/studies/STA-mini.zip" Study, + which contains the following areas: ["de", "es", "fr", "it"]. 
+ """ + + def test_lifecycle__nominal( + self, + client: TestClient, + user_access_token: str, + study_id: str, + ) -> None: + # we are working with the "DE" area + area_id = "de" + user_headers = {"Authorization": f"Bearer {user_access_token}"} + + # Table Mode - Area + res = client.get( + f"/v1/studies/{study_id}/tablemode/form", + headers=user_headers, + params={ + "table_type": "area", + "columns": ",".join(["nonDispatchablePower", "dispatchableHydroPower", "otherDispatchablePower"]), + }, + ) + assert res.status_code == 200, res.json() + expected = { + "de": { + "dispatchableHydroPower": True, + "nonDispatchablePower": True, + "otherDispatchablePower": True, + }, + "es": { + "dispatchableHydroPower": True, + "nonDispatchablePower": True, + "otherDispatchablePower": True, + }, + "fr": { + "dispatchableHydroPower": True, + "nonDispatchablePower": True, + "otherDispatchablePower": True, + }, + "it": { + "dispatchableHydroPower": True, + "nonDispatchablePower": True, + "otherDispatchablePower": True, + }, + } + actual = res.json() + assert actual == expected diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index 6504472543..d2736727d8 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -19,7 +19,6 @@ UnfeasibleProblemBehavior, ) from antarest.study.business.table_mode_management import ( - FIELDS_INFO_BY_TYPE, AssetType, BindingConstraintOperator, TableTemplateType, From 53c3e4c7f7650edb025ae68b5ce8ed566b21aa71 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Thu, 18 Jan 2024 14:27:05 +0100 Subject: [PATCH 072/147] feat(tablemode): add methods in managers to get all clusters/short-term storages --- .../business/areas/renewable_management.py | 38 +++++++ .../business/areas/st_storage_management.py | 34 ++++++ .../business/areas/thermal_management.py | 37 ++++++ .../areas/test_st_storage_management.py | 107 ++++++++++++++++++ 4 files changed, 216 insertions(+) diff --git a/antarest/study/business/areas/renewable_management.py b/antarest/study/business/areas/renewable_management.py index 84f6e56672..4d51827c7a 100644 --- a/antarest/study/business/areas/renewable_management.py +++ b/antarest/study/business/areas/renewable_management.py @@ -32,6 +32,7 @@ _CLUSTER_PATH = "input/renewables/clusters/{area_id}/list/{cluster_id}" _CLUSTERS_PATH = "input/renewables/clusters/{area_id}/list" +_ALL_CLUSTERS_PATH = "input/renewables/clusters" class TimeSeriesInterpretation(EnumIgnoreCase): @@ -145,6 +146,43 @@ def get_clusters(self, study: Study, area_id: str) -> t.Sequence[RenewableCluste return [create_renewable_output(study.version, cluster_id, cluster) for cluster_id, cluster in clusters.items()] + def get_all_clusters( + self, + study: Study, + ) -> t.Mapping[str, t.Sequence[RenewableClusterOutput]]: + """ + Retrieve all renewable clusters from all areas within a study. + + Args: + study: Study from which to retrieve the clusters. + + Returns: + A mapping of area IDs to lists of renewable clusters within the specified area. + + Raises: + RenewableClusterConfigNotFound: If no clusters are found in the specified area. 
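+
+        Example (illustrative sketch, not part of the original patch; it assumes a
+        manager built from the study storage service and an existing `study` object):
+
+            manager = RenewableManager(study_storage_service)
+            clusters_by_area = manager.get_all_clusters(study)
+            for area_id, clusters in clusters_by_area.items():
+                print(area_id, [cluster.id for cluster in clusters])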
+ """ + + file_study = self._get_file_study(study) + path = _ALL_CLUSTERS_PATH + try: + # may raise KeyError if the path is missing + clusters = file_study.tree.get(path.split("/"), depth=5) + # may raise KeyError if "list" is missing + clusters = {area_id: cluster_list["list"] for area_id, cluster_list in clusters.items()} + except KeyError: + raise RenewableClusterConfigNotFound(path) + + study_version = study.version + all_clusters = { + area_id: [ + create_renewable_output(study_version, cluster_id, cluster) + for cluster_id, cluster in cluster_obj.items() + ] + for area_id, cluster_obj in clusters.items() + } + return all_clusters + def create_cluster( self, study: Study, area_id: str, cluster_data: RenewableClusterCreation ) -> RenewableClusterOutput: diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py index 73f03b8ec3..a54c583763 100644 --- a/antarest/study/business/areas/st_storage_management.py +++ b/antarest/study/business/areas/st_storage_management.py @@ -216,6 +216,7 @@ def validate_rule_curve( _STORAGE_LIST_PATH = "input/st-storage/clusters/{area_id}/list/{storage_id}" _STORAGE_SERIES_PATH = "input/st-storage/series/{area_id}/{storage_id}/{ts_name}" +_ALL_STORAGE_PATH = "input/st-storage/clusters" def _get_values_by_ids(file_study: FileStudy, area_id: str) -> t.Mapping[str, t.Mapping[str, t.Any]]: @@ -327,6 +328,39 @@ def get_storages( storages = [create_storage_output(study_version, storage_id, options) for storage_id, options in config.items()] return sorted(storages, key=order_by) + def get_all_storages( + self, + study: Study, + ) -> t.Mapping[str, t.Sequence[STStorageOutput]]: + """ + Retrieve all short-term storages from all areas within a study. + + Args: + study: Study from which to retrieve the storages. + + Returns: + A mapping of area IDs to lists of short-term storages within the specified area. + + Raises: + STStorageConfigNotFound: If no storages are found in the specified area. + """ + + file_study = self._get_file_study(study) + path = _ALL_STORAGE_PATH + try: + # may raise KeyError if the path is missing + storages = file_study.tree.get(path.split("/"), depth=5) + # may raise KeyError if "list" is missing + storages = {area_id: cluster_list["list"] for area_id, cluster_list in storages.items()} + except KeyError: + raise STStorageConfigNotFound(path) from None + + all_storages = { + area_id: [STStorageOutput.from_config(cluster_id, cluster) for cluster_id, cluster in cluster_obj.items()] + for area_id, cluster_obj in storages.items() + } + return all_storages + def get_storage( self, study: Study, diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index d5520c0d43..73b7431742 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -37,6 +37,7 @@ _CLUSTER_PATH = "input/thermal/clusters/{area_id}/list/{cluster_id}" _CLUSTERS_PATH = "input/thermal/clusters/{area_id}/list" +_ALL_CLUSTERS_PATH = "input/thermal/clusters" @camel_case_model @@ -186,6 +187,42 @@ def get_clusters( study_version = study.version return [create_thermal_output(study_version, cluster_id, cluster) for cluster_id, cluster in clusters.items()] + def get_all_clusters( + self, + study: Study, + ) -> t.Mapping[str, t.Sequence[ThermalClusterOutput]]: + """ + Retrieve all thermal clusters from all areas within a study. + + Args: + study: Study from which to retrieve the clusters. 
+ + Returns: + A mapping of area IDs to lists of thermal clusters within the specified area. + + Raises: + ThermalClusterConfigNotFound: If no clusters are found in the specified area. + """ + + file_study = self._get_file_study(study) + path = _ALL_CLUSTERS_PATH + try: + # may raise KeyError if the path is missing + clusters = file_study.tree.get(path.split("/"), depth=5) + # may raise KeyError if "list" is missing + clusters = {area_id: cluster_list["list"] for area_id, cluster_list in clusters.items()} + except KeyError: + raise ThermalClusterConfigNotFound(path) from None + + study_version = study.version + all_clusters = { + area_id: [ + create_thermal_output(study_version, cluster_id, cluster) for cluster_id, cluster in cluster_obj.items() + ] + for area_id, cluster_obj in clusters.items() + } + return all_clusters + def create_cluster(self, study: Study, area_id: str, cluster_data: ThermalClusterCreation) -> ThermalClusterOutput: """ Create a new cluster. diff --git a/tests/study/business/areas/test_st_storage_management.py b/tests/study/business/areas/test_st_storage_management.py index 4c4db4e784..f81171831b 100644 --- a/tests/study/business/areas/test_st_storage_management.py +++ b/tests/study/business/areas/test_st_storage_management.py @@ -59,6 +59,11 @@ LIST_CFG = IniReader().read(io.StringIO(LIST_INI)) +ALL_STORAGES = { + "west": {"list": LIST_CFG}, + "east": {"list": {}}, +} + class TestSTStorageManager: @pytest.fixture(name="study_storage_service") @@ -99,6 +104,108 @@ def study_uuid_fixture(self, db_session: Session) -> str: db_session.commit() return t.cast(str, raw_study.id) + def test_get_all_storages__nominal_case( + self, + db_session: Session, + study_storage_service: StudyStorageService, + study_uuid: str, + ) -> None: + """ + This unit test is to verify the behavior of the `get_all_storages` + method in the `STStorageManager` class under nominal conditions. + It checks whether the method returns the expected storage lists + for each area, based on a specific configuration. 
+ """ + # The study must be fetched from the database + study: RawStudy = db_session.query(Study).get(study_uuid) + + # Prepare the mocks + storage = study_storage_service.get_storage(study) + file_study = storage.get_raw(study) + file_study.tree = Mock( + spec=FileStudyTree, + get=Mock(return_value=ALL_STORAGES), + ) + + # Given the following arguments + manager = STStorageManager(study_storage_service) + + # run + all_storages = manager.get_all_storages(study) + + # Check + actual = {area_id: [form.dict(by_alias=True) for form in forms] for area_id, forms in all_storages.items()} + expected = { + "west": [ + { + "id": "storage1", + "group": STStorageGroup.BATTERY, + "name": "Storage1", + "injectionNominalCapacity": 1500.0, + "withdrawalNominalCapacity": 1500.0, + "reservoirCapacity": 20000.0, + "efficiency": 0.94, + "initialLevel": 0.5, + "initialLevelOptim": True, + }, + { + "id": "storage2", + "group": STStorageGroup.PSP_CLOSED, + "name": "Storage2", + "injectionNominalCapacity": 2000.0, + "withdrawalNominalCapacity": 1500.0, + "reservoirCapacity": 20000.0, + "efficiency": 0.78, + "initialLevel": 0.5, + "initialLevelOptim": False, + }, + { + "id": "storage3", + "group": STStorageGroup.PSP_CLOSED, + "name": "Storage3", + "injectionNominalCapacity": 1500.0, + "withdrawalNominalCapacity": 1500.0, + "reservoirCapacity": 21000.0, + "efficiency": 0.72, + "initialLevel": 1.0, + "initialLevelOptim": False, + }, + ], + "east": [], + } + assert actual == expected + + def test_get_all_storages__config_not_found( + self, + db_session: Session, + study_storage_service: StudyStorageService, + study_uuid: str, + ) -> None: + """ + This test verifies that when the `get_all_storages` method is called + with a study and the corresponding configuration is not found + (indicated by the `KeyError` raised by the mock), it correctly + raises the `STStorageConfigNotFound` exception with the expected error + message containing the study ID. + """ + # The study must be fetched from the database + study: RawStudy = db_session.query(Study).get(study_uuid) + + # Prepare the mocks + storage = study_storage_service.get_storage(study) + file_study = storage.get_raw(study) + file_study.tree = Mock( + spec=FileStudyTree, + get=Mock(side_effect=KeyError("Oops!")), + ) + + # Given the following arguments + manager = STStorageManager(study_storage_service) + + # run + with pytest.raises(STStorageConfigNotFound, match="not found"): + manager.get_all_storages(study) + def test_get_st_storages__nominal_case( self, db_session: Session, From 3c6ab14cc0eef89dc5eb4ae5e7da479c91ec3fb5 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Thu, 18 Jan 2024 22:14:17 +0100 Subject: [PATCH 073/147] feat(tablemode): add the `AreaProperties` model --- .../rawstudy/model/filesystem/config/area.py | 608 ++++++++++++++++++ 1 file changed, 608 insertions(+) create mode 100644 antarest/study/storage/rawstudy/model/filesystem/config/area.py diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/area.py b/antarest/study/storage/rawstudy/model/filesystem/config/area.py new file mode 100644 index 0000000000..c62e31436a --- /dev/null +++ b/antarest/study/storage/rawstudy/model/filesystem/config/area.py @@ -0,0 +1,608 @@ +""" +Object model used to read and update area configuration. 
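+
+The module defines one configuration class per related file:
+
+- `OptimizationProperties` for the area `optimization.ini` file,
+- `AdequacyPathProperties` for the area `adequacy_patch.ini` file (study version >= 830),
+- `AreaUI` and `UIProperties` for the area `ui.ini` file,
+- `AreaFolder` for the whole per-area folder,
+- `ThermalAreasProperties` for the `input/thermal/areas.ini` file.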
+""" +import json +import re +import typing as t + +import typing_extensions as te +from pydantic import BaseModel, Extra, Field, root_validator, validator + +from antarest.study.business.enum_ignore_case import EnumIgnoreCase + + +class Properties( + BaseModel, + # On reading, if the configuration contains an extra field, it is ignored. + # This allows to read configurations that contain extra fields + # that are not yet managed by the code or that are deprecated. + extra=Extra.ignore, + # If a field is updated on assignment, it is also validated. + validate_assignment=True, + # On testing, we can use snake_case for field names. + allow_population_by_field_name=True, +): + """ + Base class for configuration sections. + """ + + def to_config(self) -> t.Mapping[str, t.Any]: + """ + Convert the object to a dictionary for writing to a configuration file (`*.ini`). + + Returns: + A dictionary with the configuration values. + """ + + config = {} + for field_name, field in self.__fields__.items(): + value = getattr(self, field_name) + if value is None: + continue + if isinstance(value, Properties): + config[field.alias] = value.to_config() + else: + config[field.alias] = json.loads(json.dumps(value)) + return config + + @classmethod + def construct(cls, _fields_set: t.Optional[t.Set[str]] = None, **values: t.Any) -> "Properties": + """ + Construct a new model instance from a dict of values, replacing aliases with real field names. + """ + # The pydantic construct() function does not allow aliases to be handled. + aliases = {(field.alias or name): name for name, field in cls.__fields__.items()} + renamed_values = {aliases.get(k, k): v for k, v in values.items()} + if _fields_set is not None: + _fields_set = {aliases.get(f, f) for f in _fields_set} + # noinspection PyTypeChecker + return super().construct(_fields_set, **renamed_values) + + +# noinspection SpellCheckingInspection +class OptimizationProperties(Properties): + """ + Object linked to `/input/areas//optimization.ini` information. + + Usage: + + >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import OptimizationProperties + >>> from pprint import pprint + + Create and validate a new Optimization object from a dictionary read from a configuration file. + + >>> obj = { + ... "filtering": { + ... "filter-synthesis": "hourly, daily, weekly, monthly, annual", + ... "filter-year-by-year": "annual,hourly", + ... }, + ... "nodal optimization": { + ... "non-dispatchable-power": "true", + ... "dispatchable-hydro-power": "false", + ... "spread-unsupplied-energy-cost": "1500", + ... "spread-spilled-energy-cost": "317.2500", + ... }, + ... 
} + + >>> opt = OptimizationProperties.parse_obj(obj) + + >>> pprint(opt.dict(by_alias=True), width=80) + {'filtering': {'filter-synthesis': 'hourly, daily, weekly, monthly, annual', + 'filter-year-by-year': 'hourly, annual'}, + 'nodal optimization': {'dispatchable-hydro-power': False, + 'non-dispatchable-power': True, + 'other-dispatchable-power': True, + 'spread-spilled-energy-cost': 317.25, + 'spread-unsupplied-energy-cost': 1500.0}} + + Update the filtering configuration : + + >>> opt.filtering.filter_synthesis = "hourly,weekly,monthly,annual,century" + >>> opt.filtering.filter_year_by_year = "hourly, monthly, annual" + + Update the modal optimization configuration : + + >>> opt.nodal_optimization.non_dispatchable_power = False + >>> opt.nodal_optimization.spread_spilled_energy_cost = 0.0 + + Convert the object to a dictionary for writing to a configuration file: + + >>> pprint(opt.dict(by_alias=True, exclude_defaults=True), width=80) + {'filtering': {'filter-synthesis': 'hourly, weekly, monthly, annual', + 'filter-year-by-year': 'hourly, monthly, annual'}, + 'nodal optimization': {'dispatchable-hydro-power': False, + 'non-dispatchable-power': False, + 'spread-unsupplied-energy-cost': 1500.0}} + """ + + class FilteringSection(Properties): + """Configuration read from section `[filtering]` of `/input/areas//optimization.ini`.""" + + filter_synthesis: str = Field("hourly, daily, weekly, monthly, annual", alias="filter-synthesis") + filter_year_by_year: str = Field("hourly, daily, weekly, monthly, annual", alias="filter-year-by-year") + + @validator("filter_synthesis", "filter_year_by_year", pre=True) + def _validate_filtering(cls, v: t.Any) -> str: + if isinstance(v, str): + values = list(set(re.findall(r"hourly|daily|weekly|monthly|annual", v.lower()))) + values.sort(key=lambda x: ["hourly", "daily", "weekly", "monthly", "annual"].index(x)) + return ", ".join(values) + raise TypeError(f"Invalid type for filtering: {type(v)}") + + # noinspection SpellCheckingInspection + class ModalOptimizationSection(Properties): + """Configuration read from section `[nodal optimization]` of `/input/areas//optimization.ini`.""" + + non_dispatchable_power: bool = Field(default=True, alias="non-dispatchable-power") + dispatchable_hydro_power: bool = Field(default=True, alias="dispatchable-hydro-power") + other_dispatchable_power: bool = Field(default=True, alias="other-dispatchable-power") + spread_unsupplied_energy_cost: float = Field(default=0.0, ge=0, alias="spread-unsupplied-energy-cost") + spread_spilled_energy_cost: float = Field(default=0.0, ge=0, alias="spread-spilled-energy-cost") + + filtering: FilteringSection = Field( + default_factory=FilteringSection, + alias="filtering", + ) + nodal_optimization: ModalOptimizationSection = Field( + default_factory=ModalOptimizationSection, + alias="nodal optimization", + ) + + +class AdequacyPatchMode(EnumIgnoreCase): + """ + Adequacy patch mode. + + Only available if study version >= 830. + """ + + OUTSIDE = "outside" + INSIDE = "inside" + VIRTUAL = "virtual" + + +class AdequacyPathProperties(Properties): + """ + Object linked to `/input/areas//adequacy_patch.ini` information. + + Only available if study version >= 830. 
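+
+    Usage (a minimal sketch added for illustration; it simply exercises the aliases
+    declared below):
+
+    >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import AdequacyPathProperties
+
+    >>> obj = {"adequacy-patch": {"adequacy-patch-mode": "inside"}}
+    >>> props = AdequacyPathProperties.parse_obj(obj)
+    >>> props.adequacy_patch.adequacy_patch_mode.value
+    'inside'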
+    """
+
+    class AdequacyPathSection(Properties):
+        """Configuration read from section `[adequacy-patch]` of `/input/areas//adequacy_patch.ini`."""
+
+        adequacy_patch_mode: AdequacyPatchMode = Field(default=AdequacyPatchMode.OUTSIDE, alias="adequacy-patch-mode")
+
+    adequacy_patch: AdequacyPathSection = Field(default_factory=AdequacyPathSection, alias="adequacy-patch")
+
+
+class AreaUI(Properties):
+    """
+    Style of an area in the map or in a layer.
+
+    Usage:
+
+    >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI
+    >>> from pprint import pprint
+
+    Create and validate a new AreaUI object from a dictionary read from a configuration file.
+
+    >>> obj = {
+    ...     "x": 1148,
+    ...     "y": 144,
+    ...     "color_r": 0,
+    ...     "color_g": 128,
+    ...     "color_b": 255,
+    ... }
+    >>> ui = AreaUI.parse_obj(obj)
+    >>> pprint(ui.dict(by_alias=True), width=80)
+    {'colorRgb': (0, 128, 255), 'x': 1148, 'y': 144}
+
+    Update the color:
+
+    >>> ui.color_rgb = (192, 168, 127)
+    >>> pprint(ui.dict(by_alias=True), width=80)
+    {'colorRgb': (192, 168, 127), 'x': 1148, 'y': 144}
+    """
+
+    x: int = Field(0, description="x coordinate of the area in the map")
+    y: int = Field(0, description="y coordinate of the area in the map")
+    color_rgb: t.Tuple[int, int, int] = Field(
+        (230, 108, 44),
+        alias="colorRgb",
+        description="color of the area in the map",
+    )
+
+    @root_validator(pre=True)
+    def _validate_colors(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]:
+        # Parse the `[ui]` section (if any)
+        color_r = values.pop("color_r", None)
+        color_g = values.pop("color_g", None)
+        color_b = values.pop("color_b", None)
+        if color_r is not None and color_g is not None and color_b is not None:
+            values["color_rgb"] = color_r, color_g, color_b
+        return values
+
+    @validator("color_rgb", pre=True)
+    def _validate_color_rgb(cls, v: t.Any) -> t.Tuple[int, int, int]:
+        if isinstance(v, str):
+            if v.startswith("#"):
+                r = int(v[1:3], 16)
+                g = int(v[3:5], 16)
+                b = int(v[5:7], 16)
+            elif v.startswith("rgb("):
+                r, g, b = [int(c) for c in v[4:-1].split(",")]
+            else:
+                r, g, b = [int(c) for c in v.split(",")]
+            return r, g, b
+
+        elif isinstance(v, (list, tuple)):
+            r, g, b = v
+            return r, g, b
+
+        else:
+            raise TypeError(f"Invalid type for 'color_rgb': {type(v)}")
+
+    def to_config(self) -> t.Mapping[str, t.Any]:
+        """
+        Convert the object to a dictionary for writing to a configuration file:
+
+        Usage:
+
+        >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI
+        >>> from pprint import pprint
+
+        >>> ui = AreaUI(x=1148, y=144, color_rgb=(0, 128, 255))
+        >>> pprint(ui.to_config(), width=80)
+        {'color_b': 255, 'color_g': 128, 'color_r': 0, 'x': 1148, 'y': 144}
+        """
+        return {
+            "x": self.x,
+            "y": self.y,
+            "color_r": self.color_rgb[0],
+            "color_g": self.color_rgb[1],
+            "color_b": self.color_rgb[2],
+        }
+
+
+class UIProperties(Properties):
+    """
+    Object linked to `/input/areas//ui.ini` information.
+
+    Usage:
+
+    >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import UIProperties
+    >>> from pprint import pprint
+
+    UIProperties has default values for `style` and `layers`:
+
+    >>> ui = UIProperties()
+    >>> pprint(ui.dict(), width=80)
+    {'layer_styles': {0: {'color_rgb': (230, 108, 44), 'x': 0, 'y': 0}},
+     'layers': {0},
+     'style': {'color_rgb': (230, 108, 44), 'x': 0, 'y': 0}}
+
+    Create and validate a new UI object from a dictionary read from a configuration file.
+
+    >>> obj = {
+    ...     "ui": {
+    ...         "x": 1148,
+    ...         "y": 144,
+    ...
"color_r": 0, + ... "color_g": 128, + ... "color_b": 255, + ... "layers": "0 7", + ... }, + ... "layerX": {"0": 1148, "7": 18}, + ... "layerY": {"0": 144, "7": -22}, + ... "layerColor": { + ... "0": "0 , 128 , 255", + ... "4": "0 , 128 , 255", + ... "6": "192 , 168 , 99", + ... "7": "0 , 128 , 255", + ... "8": "0 , 128 , 255", + ... }, + ... } + + >>> ui = UIProperties.parse_obj(obj) + >>> pprint(ui.dict(), width=80) + {'layer_styles': {0: {'color_rgb': (0, 128, 255), 'x': 1148, 'y': 144}, + 4: {'color_rgb': (0, 128, 255), 'x': 1148, 'y': 144}, + 6: {'color_rgb': (192, 168, 99), 'x': 1148, 'y': 144}, + 7: {'color_rgb': (0, 128, 255), 'x': 18, 'y': -22}, + 8: {'color_rgb': (0, 128, 255), 'x': 1148, 'y': 144}}, + 'layers': {0, 7}, + 'style': {'color_rgb': (0, 128, 255), 'x': 1148, 'y': 144}} + + """ + + style: AreaUI = Field( + default_factory=AreaUI, + description="style of the area in the map: coordinates and color", + ) + layers: t.Set[int] = Field( + default_factory=set, + description="layers where the area is visible", + ) + layer_styles: t.Dict[int, AreaUI] = Field( + default_factory=dict, + description="style of the area in each layer", + alias="layerStyles", + ) + + @root_validator(pre=True) + def _validate_layers(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]: + # Defined the default style if missing + style = values.get("style") + if style is None: + values["style"] = AreaUI() + elif isinstance(style, dict): + values["style"] = AreaUI(**style) + else: + values["style"] = AreaUI(**style.dict()) + + # Define the default layers if missing + layers = values.get("layers") + if layers is None: + values["layers"] = {0} + + # Define the default layer styles if missing + layer_styles = values.get("layer_styles") + if layer_styles is None: + values["layer_styles"] = {0: AreaUI()} + elif isinstance(layer_styles, dict): + values["layer_styles"] = {0: AreaUI()} + for key, style in layer_styles.items(): + key = int(key) + if isinstance(style, dict): + values["layer_styles"][key] = AreaUI(**style) + else: + values["layer_styles"][key] = AreaUI(**style.dict()) + else: + raise TypeError(f"Invalid type for layer_styles: {type(layer_styles)}") + + # Parse the `[ui]` section (if any) + ui_section = values.pop("ui", {}) + if ui_section: + # If `layers` is a single integer, convert it to `str` first + layers = str(ui_section.pop("layers", "0")) + values["layers"] = set([int(layer) for layer in layers.split()]) + values["style"].x = ui_section.pop("x", values["style"].x) + values["style"].y = ui_section.pop("y", values["style"].y) + values["style"].color_rgb = ( + ui_section.pop("color_r", values["style"].color_rgb[0]), + ui_section.pop("color_g", values["style"].color_rgb[1]), + ui_section.pop("color_b", values["style"].color_rgb[2]), + ) + + # Parse the `[layerX]`, `[layerY]` and `[layerColor]` sections (if any) + layer_x_section = values.pop("layerX", {}) + layer_y_section = values.pop("layerY", {}) + layer_color_section = values.pop("layerColor", {}) + # Key are converted to `int` and values to `str` (for splitting) + layer_x_section = {int(layer): str(x) for layer, x in layer_x_section.items()} + layer_y_section = {int(layer): str(y) for layer, y in layer_y_section.items()} + layer_color_section = {int(layer): str(color) for layer, color in layer_color_section.items()} + # indexes must contain all the keys from the three sections + indexes = set(layer_x_section) | set(layer_y_section) | set(layer_color_section) + if indexes: + layer_styles = {index: values["style"].copy() for 
index in indexes} + for layer, x in layer_x_section.items(): + layer_styles[layer].x = int(x) + for layer, y in layer_y_section.items(): + layer_styles[layer].y = int(y) + for layer, color in layer_color_section.items(): + r, g, b = [int(c) for c in color.split(",")] + layer_styles[layer].color_rgb = r, g, b + values["layer_styles"].update(layer_styles) + values["layers"] = values["layers"].intersection(indexes) + + return values + + def to_config(self) -> t.Mapping[str, t.Mapping[str, t.Any]]: + """ + Convert the object to a dictionary for writing to a configuration file: + + Usage: + + >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import UIProperties + >>> from pprint import pprint + + >>> ui = UIProperties( + ... style=AreaUI(x=1148, y=144, color_rgb=(0, 128, 255)), + ... layers={0, 7}, + ... layer_styles={ + ... 6: AreaUI(x=1148, y=144, color_rgb=(192, 168, 99)), + ... 7: AreaUI(x=18, y=-22, color_rgb=(0, 128, 255)), + ... }) + >>> pprint(ui.to_config(), width=80) + {'layerColor': {'0': '230, 108, 44', '6': '192, 168, 99', '7': '0, 128, 255'}, + 'layerX': {'0': 0, '6': 1148, '7': 18}, + 'layerY': {'0': 0, '6': 144, '7': -22}, + 'ui': {'color_b': 255, + 'color_g': 128, + 'color_r': 0, + 'layers': '0 7', + 'x': 1148, + 'y': 144}} + """ + obj: t.MutableMapping[str, t.MutableMapping[str, t.Any]] = { + "ui": {}, + "layerX": {}, + "layerY": {}, + "layerColor": {}, + } + obj["ui"].update(self.style.to_config()) + obj["ui"]["layers"] = " ".join(str(layer) for layer in sorted(self.layers)) + for layer, style in self.layer_styles.items(): + obj["layerX"][str(layer)] = style.x + obj["layerY"][str(layer)] = style.y + obj["layerColor"][str(layer)] = ", ".join(str(c) for c in style.color_rgb) + return obj + + +class AreaFolder(Properties): + """ + Object linked to `/input/areas/` information. + + Usage: + + >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaFolder + >>> from pprint import pprint + + Create and validate a new AreaProperties object from a dictionary read from a configuration file. + + >>> obj = AreaFolder() + >>> pprint(obj.dict(), width=80) + {'adequacy_patch': None, + 'optimization': {'filtering': {'filter_synthesis': 'hourly, daily, weekly, ' + 'monthly, annual', + 'filter_year_by_year': 'hourly, daily, weekly, ' + 'monthly, annual'}, + 'nodal_optimization': {'dispatchable_hydro_power': True, + 'non_dispatchable_power': True, + 'other_dispatchable_power': True, + 'spread_spilled_energy_cost': 0.0, + 'spread_unsupplied_energy_cost': 0.0}}, + 'ui': {'layer_styles': {0: {'color_rgb': (230, 108, 44), 'x': 0, 'y': 0}}, + 'layers': {0}, + 'style': {'color_rgb': (230, 108, 44), 'x': 0, 'y': 0}}} + + >>> pprint(obj.to_config(), width=80) + {'optimization': {'filtering': {'filter-synthesis': 'hourly, daily, weekly, ' + 'monthly, annual', + 'filter-year-by-year': 'hourly, daily, weekly, ' + 'monthly, annual'}, + 'nodal optimization': {'dispatchable-hydro-power': True, + 'non-dispatchable-power': True, + 'other-dispatchable-power': True, + 'spread-spilled-energy-cost': 0.0, + 'spread-unsupplied-energy-cost': 0.0}}, + 'ui': {'layerColor': {'0': '230, 108, 44'}, + 'layerX': {'0': 0}, + 'layerY': {'0': 0}, + 'ui': {'color_b': 44, + 'color_g': 108, + 'color_r': 230, + 'layers': '0', + 'x': 0, + 'y': 0}}} + + We can construct an AreaProperties object from invalid data: + + >>> data = { + ... "optimization": { + ... "filtering": {"filter-synthesis": "annual, centennial"}, + ... "nodal optimization": { + ... 
"spread-spilled-energy-cost": "15.5", + ... "spread-unsupplied-energy-cost": "yes", + ... }, + ... }, + ... "ui": {"style": {"color_rgb": (0, 128, 256)}}, + ... } + + >>> obj = AreaFolder.construct(**data) + >>> pprint(obj.dict(), width=80) + {'adequacy_patch': None, + 'optimization': {'filtering': {'filter-synthesis': 'annual, centennial'}, + 'nodal optimization': {'spread-spilled-energy-cost': '15.5', + 'spread-unsupplied-energy-cost': 'yes'}}, + 'ui': {'style': {'color_rgb': (0, 128, 256)}}} + + >>> AreaFolder.validate(data) + Traceback (most recent call last): + ... + pydantic.error_wrappers.ValidationError: 1 validation error for AreaFolder + optimization -> nodal optimization -> spread-unsupplied-energy-cost + value is not a valid float (type=type_error.float) + """ + + optimization: OptimizationProperties = Field( + default_factory=OptimizationProperties, + description="optimization configuration", + ) + adequacy_patch: t.Optional[AdequacyPathProperties] = Field( + None, + description="adequacy patch configuration", + ) + ui: UIProperties = Field( + default_factory=UIProperties, + description="UI configuration", + ) + + +EnergyCost = te.Annotated[float, Field(ge=0, description="Energy cost (€/MWh)")] + + +# noinspection SpellCheckingInspection +class ThermalAreasProperties(Properties): + """ + Object linked to `/input/thermal/areas.ini` information. + + Usage: + + >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import ThermalAreasProperties + >>> from pprint import pprint + + Create and validate a new ThermalArea object from a dictionary read from a configuration file:: + + [unserverdenergycost] + at = 4000.80 + be = 3500 + de = 1250 + fr = 138.50 + + [spilledenergycost] + cz = 100.0 + + >>> obj = { + ... "unserverdenergycost": { + ... "at": "4000.80", + ... "be": "3500", + ... "de": "1250", + ... "fr": "138.50", + ... }, + ... "spilledenergycost": { + ... "cz": "100.0", + ... }, + ... 
} + >>> area = ThermalAreasProperties.parse_obj(obj) + >>> pprint(area.dict(), width=80) + {'spilled_energy_cost': {'cz': 100.0}, + 'unserverd_energy_cost': {'at': 4000.8, + 'be': 3500.0, + 'de': 1250.0, + 'fr': 138.5}} + + Update the unserverd energy cost: + + >>> area.unserverd_energy_cost["at"] = 6500.0 + >>> area.unserverd_energy_cost["fr"] = 0.0 + >>> pprint(area.dict(), width=80) + {'spilled_energy_cost': {'cz': 100.0}, + 'unserverd_energy_cost': {'at': 6500.0, 'be': 3500.0, 'de': 1250.0, 'fr': 0.0}} + + Convert the object to a dictionary for writing to a configuration file: + + >>> pprint(area.to_config(), width=80) + {'spilledenergycost': {'cz': 100.0}, + 'unserverdenergycost': {'at': 6500.0, 'be': 3500.0, 'de': 1250.0, 'fr': 0.0}} + """ + + unserverd_energy_cost: t.MutableMapping[str, EnergyCost] = Field( + default_factory=dict, + alias="unserverdenergycost", + description="unserverd energy cost (€/MWh) of each area", + ) + + spilled_energy_cost: t.MutableMapping[str, EnergyCost] = Field( + default_factory=dict, + alias="spilledenergycost", + description="spilled energy cost (€/MWh) of each area", + ) + + @validator("unserverd_energy_cost", "spilled_energy_cost", pre=True) + def _validate_energy_cost(cls, v: t.Any) -> t.MutableMapping[str, EnergyCost]: + if isinstance(v, dict): + return {str(k): float(v) for k, v in v.items()} + raise TypeError(f"Invalid type for energy cost: {type(v)}") From fac35247d9b8f3d86cfde235214cfa628f4a3d40 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Thu, 18 Jan 2024 22:34:40 +0100 Subject: [PATCH 074/147] feat(tablemode): replace `area_management.AreaUI` class with `area.AreaUI` --- antarest/study/business/area_management.py | 71 +++++++++---------- antarest/study/service.py | 3 +- antarest/study/web/study_data_blueprint.py | 1 + .../storage/business/test_arealink_manager.py | 3 +- 4 files changed, 38 insertions(+), 40 deletions(-) diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py index 544f18d8cf..84265d8da1 100644 --- a/antarest/study/business/area_management.py +++ b/antarest/study/business/area_management.py @@ -1,7 +1,7 @@ +import enum import logging import re -from enum import Enum -from typing import Any, Dict, List, Optional, Sequence, Tuple +import typing as t from pydantic import BaseModel @@ -10,6 +10,7 @@ from antarest.study.model import Patch, PatchArea, PatchCluster, RawStudy, Study from antarest.study.repository import StudyMetadataRepository from antarest.study.storage.patch_service import PatchService +from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, DistrictSet, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.storage_service import StudyStorageService @@ -21,7 +22,7 @@ logger = logging.getLogger(__name__) -class AreaType(Enum): +class AreaType(enum.Enum): AREA = "AREA" DISTRICT = "DISTRICT" @@ -29,8 +30,8 @@ class AreaType(Enum): class AreaCreationDTO(BaseModel): name: str type: AreaType - metadata: Optional[PatchArea] - set: Optional[List[str]] + metadata: t.Optional[PatchArea] + set: t.Optional[t.List[str]] class ClusterInfoDTO(PatchCluster): @@ -39,34 +40,28 @@ class ClusterInfoDTO(PatchCluster): enabled: bool = True unitcount: int = 0 nominalcapacity: int = 0 - group: Optional[str] = None - min_stable_power: Optional[int] = None - min_up_time: Optional[int] = None - min_down_time: 
Optional[int] = None - spinning: Optional[float] = None - marginal_cost: Optional[float] = None - spread_cost: Optional[float] = None - market_bid_cost: Optional[float] = None + group: t.Optional[str] = None + min_stable_power: t.Optional[int] = None + min_up_time: t.Optional[int] = None + min_down_time: t.Optional[int] = None + spinning: t.Optional[float] = None + marginal_cost: t.Optional[float] = None + spread_cost: t.Optional[float] = None + market_bid_cost: t.Optional[float] = None class AreaInfoDTO(AreaCreationDTO): id: str - thermals: Optional[List[ClusterInfoDTO]] = None - - -class AreaUI(BaseModel): - x: int - y: int - color_rgb: Tuple[int, int, int] + thermals: t.Optional[t.List[ClusterInfoDTO]] = None class LayerInfoDTO(BaseModel): id: str name: str - areas: List[str] + areas: t.List[str] -def _get_ui_info_map(file_study: FileStudy, area_ids: Sequence[str]) -> Dict[str, Any]: +def _get_ui_info_map(file_study: FileStudy, area_ids: t.Sequence[str]) -> t.Dict[str, t.Any]: """ Get the UI information (a JSON object) for each selected Area. @@ -95,7 +90,7 @@ def _get_ui_info_map(file_study: FileStudy, area_ids: Sequence[str]) -> Dict[str return ui_info_map -def _get_area_layers(area_uis: Dict[str, Any], area: str) -> List[str]: +def _get_area_layers(area_uis: t.Dict[str, t.Any], area: str) -> t.List[str]: if area in area_uis and "ui" in area_uis[area] and "layers" in area_uis[area]["ui"]: return re.split(r"\s+", (str(area_uis[area]["ui"]["layers"]) or "")) return [] @@ -110,7 +105,7 @@ def __init__( self.storage_service = storage_service self.patch_service = PatchService(repository=repository) - def get_all_areas(self, study: RawStudy, area_type: Optional[AreaType] = None) -> List[AreaInfoDTO]: + def get_all_areas(self, study: RawStudy, area_type: t.Optional[AreaType] = None) -> t.List[AreaInfoDTO]: """ Retrieves all areas and districts of a raw study based on the area type. @@ -124,9 +119,9 @@ def get_all_areas(self, study: RawStudy, area_type: Optional[AreaType] = None) - storage_service = self.storage_service.get_storage(study) file_study = storage_service.get_raw(study) metadata = self.patch_service.get(study) - areas_metadata: Dict[str, PatchArea] = metadata.areas or {} - cfg_areas: Dict[str, Area] = file_study.config.areas - result: List[AreaInfoDTO] = [] + areas_metadata: t.Dict[str, PatchArea] = metadata.areas or {} + cfg_areas: t.Dict[str, Area] = file_study.config.areas + result: t.List[AreaInfoDTO] = [] if area_type is None or area_type == AreaType.AREA: result.extend( @@ -141,7 +136,7 @@ def get_all_areas(self, study: RawStudy, area_type: Optional[AreaType] = None) - ) if area_type is None or area_type == AreaType.DISTRICT: - cfg_sets: Dict[str, DistrictSet] = file_study.config.sets + cfg_sets: t.Dict[str, DistrictSet] = file_study.config.sets result.extend( AreaInfoDTO( id=set_id, @@ -155,7 +150,7 @@ def get_all_areas(self, study: RawStudy, area_type: Optional[AreaType] = None) - return result - def get_all_areas_ui_info(self, study: RawStudy) -> Dict[str, Any]: + def get_all_areas_ui_info(self, study: RawStudy) -> t.Dict[str, t.Any]: """ Retrieve information about all areas' user interface (UI) from the study. 
@@ -173,7 +168,7 @@ def get_all_areas_ui_info(self, study: RawStudy) -> Dict[str, Any]: area_ids = list(file_study.config.areas) return _get_ui_info_map(file_study, area_ids) - def get_layers(self, study: RawStudy) -> List[LayerInfoDTO]: + def get_layers(self, study: RawStudy) -> t.List[LayerInfoDTO]: storage_service = self.storage_service.get_storage(study) file_study = storage_service.get_raw(study) area_ids = list(file_study.config.areas) @@ -196,7 +191,7 @@ def get_layers(self, study: RawStudy) -> List[LayerInfoDTO]: for layer in layers ] - def update_layer_areas(self, study: RawStudy, layer_id: str, areas: List[str]) -> None: + def update_layer_areas(self, study: RawStudy, layer_id: str, areas: t.List[str]) -> None: logger.info(f"Updating layer {layer_id} with areas {areas}") file_study = self.storage_service.get_storage(study).get_raw(study) layers = file_study.tree.get(["layers", "layers", "layers"]) @@ -213,9 +208,9 @@ def update_layer_areas(self, study: RawStudy, layer_id: str, areas: List[str]) - ] to_remove_areas = [area for area in existing_areas if area not in areas] to_add_areas = [area for area in areas if area not in existing_areas] - commands: List[ICommand] = [] + commands: t.List[ICommand] = [] - def create_update_commands(area_id: str) -> List[ICommand]: + def create_update_commands(area_id: str) -> t.List[ICommand]: return [ UpdateConfig( target=f"input/areas/{area_id}/ui/layerX", @@ -235,7 +230,7 @@ def create_update_commands(area_id: str) -> List[ICommand]: ] for area in to_remove_areas: - area_to_remove_layers: List[str] = _get_area_layers(areas_ui, area) + area_to_remove_layers: t.List[str] = _get_area_layers(areas_ui, area) if layer_id in areas_ui[area]["layerX"]: del areas_ui[area]["layerX"][layer_id] if layer_id in areas_ui[area]["layerY"]: @@ -246,7 +241,7 @@ def create_update_commands(area_id: str) -> List[ICommand]: ) commands.extend(create_update_commands(area)) for area in to_add_areas: - area_to_add_layers: List[str] = _get_area_layers(areas_ui, area) + area_to_add_layers: t.List[str] = _get_area_layers(areas_ui, area) if layer_id not in areas_ui[area]["layerX"]: areas_ui[area]["layerX"][layer_id] = areas_ui[area]["ui"]["x"] if layer_id not in areas_ui[area]["layerY"]: @@ -423,7 +418,7 @@ def update_thermal_cluster_metadata( self, study: Study, area_id: str, - clusters_metadata: Dict[str, PatchCluster], + clusters_metadata: t.Dict[str, PatchCluster], ) -> AreaInfoDTO: file_study = self.storage_service.get_storage(study).get_raw(study) patch = self.patch_service.get(study) @@ -452,7 +447,7 @@ def delete_area(self, study: Study, area_id: str) -> None: def _update_with_cluster_metadata( area: str, info: ClusterInfoDTO, - cluster_patch: Dict[str, PatchCluster], + cluster_patch: t.Dict[str, PatchCluster], ) -> ClusterInfoDTO: patch = cluster_patch.get(f"{area}.{info.id}", PatchCluster()) info.code_oi = patch.code_oi @@ -460,7 +455,7 @@ def _update_with_cluster_metadata( return info @staticmethod - def _get_clusters(file_study: FileStudy, area: str, metadata_patch: Patch) -> List[ClusterInfoDTO]: + def _get_clusters(file_study: FileStudy, area: str, metadata_patch: Patch) -> t.List[ClusterInfoDTO]: thermal_clusters_data = file_study.tree.get(["input", "thermal", "clusters", area, "list"]) cluster_patch = metadata_patch.thermal_clusters or {} return [ diff --git a/antarest/study/service.py b/antarest/study/service.py index f6f04d4681..8e6ffa37ea 100644 --- a/antarest/study/service.py +++ b/antarest/study/service.py @@ -49,7 +49,7 @@ from 
antarest.study.business.adequacy_patch_management import AdequacyPatchManager from antarest.study.business.advanced_parameters_management import AdvancedParamsManager from antarest.study.business.allocation_management import AllocationManager -from antarest.study.business.area_management import AreaCreationDTO, AreaInfoDTO, AreaManager, AreaType, AreaUI +from antarest.study.business.area_management import AreaCreationDTO, AreaInfoDTO, AreaManager, AreaType from antarest.study.business.areas.hydro_management import HydroManager from antarest.study.business.areas.properties_management import PropertiesManager from antarest.study.business.areas.renewable_management import RenewableManager @@ -104,6 +104,7 @@ StudySortBy, ) from antarest.study.storage.matrix_profile import adjust_matrix_columns_index +from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfigDTO from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index 4f46ec1fab..5a2ebc347a 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -65,6 +65,7 @@ from antarest.study.service import StudyService from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id +from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI logger = logging.getLogger(__name__) diff --git a/tests/storage/business/test_arealink_manager.py b/tests/storage/business/test_arealink_manager.py index 4caee7b7bd..6d664d329b 100644 --- a/tests/storage/business/test_arealink_manager.py +++ b/tests/storage/business/test_arealink_manager.py @@ -11,11 +11,12 @@ from antarest.core.utils.fastapi_sqlalchemy import db from antarest.matrixstore.repository import MatrixContentRepository from antarest.matrixstore.service import SimpleMatrixService -from antarest.study.business.area_management import AreaCreationDTO, AreaManager, AreaType, AreaUI +from antarest.study.business.area_management import AreaCreationDTO, AreaManager, AreaType from antarest.study.business.link_management import LinkInfoDTO, LinkManager from antarest.study.model import Patch, PatchArea, PatchCluster, RawStudy, StudyAdditionalData from antarest.study.repository import StudyMetadataRepository from antarest.study.storage.patch_service import PatchService +from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI from antarest.study.storage.rawstudy.model.filesystem.config.files import build from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, DistrictSet, FileStudyTreeConfig, Link from antarest.study.storage.rawstudy.model.filesystem.config.thermal import ThermalConfig From 0767804231fa6e86f87c62953f5dcc850b6ea89a Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Fri, 19 Jan 2024 11:02:11 +0100 Subject: [PATCH 075/147] fix(renewable): add missing classes in `__all__` --- .../storage/rawstudy/model/filesystem/config/renewable.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py 
b/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py index 4d34e21637..57beb01b29 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py @@ -7,10 +7,11 @@ from antarest.study.storage.rawstudy.model.filesystem.config.identifier import IgnoreCaseIdentifier __all__ = ( - "TimeSeriesInterpretation", - "RenewableProperties", + "RenewableClusterGroup", "RenewableConfig", "RenewableConfigType", + "RenewableProperties", + "TimeSeriesInterpretation", "create_renewable_config", "RenewableClusterGroup", ) From 6332cff39ad35aababee5321ceca1c95214da2c7 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Fri, 19 Jan 2024 11:29:25 +0100 Subject: [PATCH 076/147] test(tablemode): drop dependency to `AdequacyPatchMode` in integration tests --- tests/integration/test_integration.py | 35 ++++++++++----------------- 1 file changed, 13 insertions(+), 22 deletions(-) diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index d2736727d8..3b77682658 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -10,7 +10,6 @@ from antarest.launcher.model import LauncherLoadDTO from antarest.study.business.adequacy_patch_management import PriceTakingOrder from antarest.study.business.area_management import LayerInfoDTO -from antarest.study.business.areas.properties_management import AdequacyPatchMode from antarest.study.business.areas.renewable_management import TimeSeriesInterpretation from antarest.study.business.general_management import Mode from antarest.study.business.optimization_management import ( @@ -1160,7 +1159,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "otherDispatchPower": True, "filterSynthesis": {"hourly", "daily", "weekly", "monthly", "annual"}, "filterByYear": {"hourly", "daily", "weekly", "monthly", "annual"}, - "adequacyPatchMode": AdequacyPatchMode.OUTSIDE.value, + "adequacyPatchMode": "outside", } client.put( @@ -1174,7 +1173,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "otherDispatchPower": False, "filterSynthesis": ["monthly", "annual"], "filterByYear": ["hourly", "daily", "annual"], - "adequacyPatchMode": AdequacyPatchMode.INSIDE.value, + "adequacyPatchMode": "inside", }, ) res_properties_config = client.get(f"/v1/studies/{study_id}/areas/area 1/properties/form", headers=admin_headers) @@ -1189,7 +1188,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "otherDispatchPower": False, "filterSynthesis": {"monthly", "annual"}, "filterByYear": {"hourly", "daily", "annual"}, - "adequacyPatchMode": AdequacyPatchMode.INSIDE.value, + "adequacyPatchMode": "inside", } # Hydro form @@ -1352,7 +1351,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "spreadSpilledEnergyCost": 0.0, "filterSynthesis": "monthly, annual", "filterYearByYear": "hourly, daily, annual", - "adequacyPatchMode": AdequacyPatchMode.INSIDE.value, + "adequacyPatchMode": "inside", }, "area 2": { "nonDispatchablePower": True, @@ -1364,7 +1363,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "spreadSpilledEnergyCost": 0.0, "filterSynthesis": "hourly, daily, weekly, monthly, annual", "filterYearByYear": "hourly, daily, weekly, monthly, annual", - "adequacyPatchMode": AdequacyPatchMode.OUTSIDE.value, + "adequacyPatchMode": "outside", }, } @@ -1379,13 +1378,13 @@ def 
test_area_management(client: TestClient, admin_access_token: str) -> None: "nonDispatchablePower": True, "spreadSpilledEnergyCost": 1.1, "filterYearByYear": "monthly, annual", - "adequacyPatchMode": AdequacyPatchMode.OUTSIDE.value, + "adequacyPatchMode": "outside", }, "area 2": { "nonDispatchablePower": False, "spreadSpilledEnergyCost": 3.0, "filterSynthesis": "hourly", - "adequacyPatchMode": AdequacyPatchMode.INSIDE.value, + "adequacyPatchMode": "inside", }, }, ) @@ -1409,7 +1408,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "spreadSpilledEnergyCost": 1.1, "filterSynthesis": "monthly, annual", "filterYearByYear": "monthly, annual", - "adequacyPatchMode": AdequacyPatchMode.OUTSIDE.value, + "adequacyPatchMode": "outside", }, "area 2": { "nonDispatchablePower": False, @@ -1421,7 +1420,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "spreadSpilledEnergyCost": 3.0, "filterSynthesis": "hourly", "filterYearByYear": "hourly, daily, weekly, monthly, annual", - "adequacyPatchMode": AdequacyPatchMode.INSIDE.value, + "adequacyPatchMode": "inside", }, } @@ -1497,9 +1496,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: headers=admin_headers, params={ "table_type": TableTemplateType.THERMAL_CLUSTER, - "columns": ",".join( - FIELDS_INFO_BY_TYPE[TableTemplateType.THERMAL_CLUSTER] - ), + "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.THERMAL_CLUSTER]), }, ) res_table_data_json = res_table_data.json() @@ -1574,9 +1571,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: headers=admin_headers, params={ "table_type": TableTemplateType.THERMAL_CLUSTER, - "columns": ",".join( - FIELDS_INFO_BY_TYPE[TableTemplateType.THERMAL_CLUSTER] - ), + "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.THERMAL_CLUSTER]), }, ) res_table_data_json = res_table_data.json() @@ -1634,9 +1629,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: headers=admin_headers, params={ "table_type": TableTemplateType.RENEWABLE_CLUSTER, - "columns": ",".join( - FIELDS_INFO_BY_TYPE[TableTemplateType.RENEWABLE_CLUSTER] - ), + "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.RENEWABLE_CLUSTER]), }, ) res_table_data_json = res_table_data.json() @@ -1679,9 +1672,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: headers=admin_headers, params={ "table_type": TableTemplateType.RENEWABLE_CLUSTER, - "columns": ",".join( - FIELDS_INFO_BY_TYPE[TableTemplateType.RENEWABLE_CLUSTER] - ), + "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.RENEWABLE_CLUSTER]), }, ) res_table_data_json = res_table_data.json() From a8313e06ab0c963f530a0bd9161c32e8b28e8d75 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Fri, 19 Jan 2024 11:36:42 +0100 Subject: [PATCH 077/147] chore(tablemode): replace `properties_management.AdequacyPatchMode` with `area.AdequacyPatchMode` --- antarest/study/business/areas/properties_management.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/antarest/study/business/areas/properties_management.py b/antarest/study/business/areas/properties_management.py index 96850d6d7b..2014c554dc 100644 --- a/antarest/study/business/areas/properties_management.py +++ b/antarest/study/business/areas/properties_management.py @@ -4,9 +4,9 @@ from pydantic import root_validator -from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import FieldInfo, 
FormFieldsBaseModel, execute_or_add_commands from antarest.study.model import Study +from antarest.study.storage.rawstudy.model.filesystem.config.area import AdequacyPatchMode from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.storage_service import StudyStorageService from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig @@ -37,12 +37,6 @@ def decode_filter(encoded_value: Set[str], current_filter: Optional[str] = None) return ", ".join(sort_filter_options(encoded_value)) -class AdequacyPatchMode(EnumIgnoreCase): - OUTSIDE = "outside" - INSIDE = "inside" - VIRTUAL = "virtual" - - class PropertiesFormFields(FormFieldsBaseModel): energy_cost_unsupplied: Optional[float] energy_cost_spilled: Optional[float] From f73f49b683f959060f829bc991ea139d2165cb52 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Fri, 19 Jan 2024 13:34:25 +0100 Subject: [PATCH 078/147] feat(tablemode): add the `get_all_area_props` method to `AreaManager` --- antarest/study/business/area_management.py | 143 ++++++++++++++++++++- 1 file changed, 139 insertions(+), 4 deletions(-) diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py index 84265d8da1..2c4fc0dfa9 100644 --- a/antarest/study/business/area_management.py +++ b/antarest/study/business/area_management.py @@ -3,14 +3,20 @@ import re import typing as t -from pydantic import BaseModel +from pydantic import BaseModel, Extra, Field -from antarest.core.exceptions import DuplicateAreaName, LayerNotAllowedToBeDeleted, LayerNotFound -from antarest.study.business.utils import execute_or_add_commands +from antarest.core.exceptions import DuplicateAreaName, ConfigFileNotFound, LayerNotAllowedToBeDeleted, LayerNotFound +from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands from antarest.study.model import Patch, PatchArea, PatchCluster, RawStudy, Study from antarest.study.repository import StudyMetadataRepository from antarest.study.storage.patch_service import PatchService -from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI +from antarest.study.storage.rawstudy.model.filesystem.config.area import ( + AdequacyPathProperties, + AreaFolder, + AreaUI, + OptimizationProperties, + ThermalAreasProperties, +) from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, DistrictSet, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.storage_service import StudyStorageService @@ -96,15 +102,144 @@ def _get_area_layers(area_uis: t.Dict[str, t.Any], area: str) -> t.List[str]: return [] +_ALL_AREAS_PATH = "input/areas" +_THERMAL_AREAS_PATH = "input/thermal/areas" + + +# noinspection SpellCheckingInspection +class _BaseAreaDTO( + OptimizationProperties.FilteringSection, + OptimizationProperties.ModalOptimizationSection, + AdequacyPathProperties.AdequacyPathSection, + extra=Extra.forbid, + validate_assignment=True, + allow_population_by_field_name=True, +): + """ + Represents an area output. + + Aggregates the fields of the `OptimizationProperties` and `AdequacyPathProperties` classes, + but without the `UIProperties` fields. + + Add the fields extracted from the `/input/thermal/areas.ini` information: + + - `average_unsupplied_energy_cost` is extracted from `unserverd_energy_cost`, + - `average_spilled_energy_cost` is extracted from `spilled_energy_cost`. 
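+
+    A minimal sketch of how the flat DTO is typically built (see `GetAreaDTO` and
+    `AreaManager.get_all_area_props` below; `area_cfg` stands for a dictionary read
+    from the study tree):
+
+        area_folder = AreaFolder(**area_cfg)
+        area_dto = GetAreaDTO.create_area_dto(
+            area_folder,
+            average_unsupplied_energy_cost=3000.0,
+            average_spilled_energy_cost=100.0,
+        )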
+ """ + + average_unsupplied_energy_cost: float = Field(0.0, description="average unserverd energy cost (€/MWh)") + average_spilled_energy_cost: float = Field(0.0, description="average spilled energy cost (€/MWh)") + + +# noinspection SpellCheckingInspection +@camel_case_model +class GetAreaDTO(_BaseAreaDTO, metaclass=AllOptionalMetaclass): + """ + DTO object use to get the area information using a flat structure. + """ + + @classmethod + def create_area_dto( + cls, + area_folder: AreaFolder, + *, + average_unsupplied_energy_cost: float, + average_spilled_energy_cost: float, + ) -> "GetAreaDTO": + """ + Creates a `GetAreaDTO` object from configuration data. + + Args: + area_folder: Configuration data read from the `/input/areas/` information. + average_unsupplied_energy_cost: Unserverd energy cost (€/MWh). + average_spilled_energy_cost: Spilled energy cost (€/MWh). + Returns: + The `GetAreaDTO` object. + """ + obj = { + "average_unsupplied_energy_cost": average_unsupplied_energy_cost, + "average_spilled_energy_cost": average_spilled_energy_cost, + **area_folder.optimization.filtering.dict(by_alias=False), + **area_folder.optimization.nodal_optimization.dict(by_alias=False), + # adequacy_patch is only available if study version >= 830. + **(area_folder.adequacy_patch.adequacy_patch.dict(by_alias=False) if area_folder.adequacy_patch else {}), + } + return cls(**obj) + + class AreaManager: + """ + Manages operations related to areas in a study, including retrieval, creation, and updates. + + Attributes: + storage_service: The service responsible for study storage operations. + patch_service: The service responsible for study patch operations. + This service is used to store additional data for each area, in particular the country + of origin (`country`) and a list of tags for searching (`tags`). + """ + def __init__( self, storage_service: StudyStorageService, repository: StudyMetadataRepository, ) -> None: + """ + Initializes the AreaManager. + + Args: + storage_service: The service responsible for study storage operations. + repository: The repository for study metadata operations. + """ self.storage_service = storage_service self.patch_service = PatchService(repository=repository) + # noinspection SpellCheckingInspection + def get_all_area_props(self, study: RawStudy) -> t.Mapping[str, GetAreaDTO]: + """ + Retrieves all areas of a study. + + Args: + study: The raw study object. + Returns: + A mapping of area IDs to area properties. + Raises: + ConfigFileNotFound: if a configuration file is not found. + """ + file_study = self.storage_service.get_storage(study).get_raw(study) + + # Get the area information from the `/input/areas` file. + path = _ALL_AREAS_PATH + try: + areas_cfg = file_study.tree.get(path.split("/"), depth=5) + except KeyError: + raise ConfigFileNotFound(path) from None + else: + # "list" and "sets" must be removed: we only need areas. + areas_cfg.pop("list", None) + areas_cfg.pop("sets", None) + + # Get the unserverd and spilled energy costs from the `/input/thermal/areas.ini` file. + path = _THERMAL_AREAS_PATH + try: + thermal_cfg = file_study.tree.get(path.split("/"), depth=3) + except KeyError: + raise ConfigFileNotFound(path) from None + else: + thermal_areas = ThermalAreasProperties(**thermal_cfg) + + # areas_cfg contains a dictionary where the keys are the area IDs, + # and the values are objects that can be converted to `AreaFolder`. 
+ area_map = {} + for area_id, area_cfg in areas_cfg.items(): + area_folder = AreaFolder(**area_cfg) + area_map[area_id] = GetAreaDTO.create_area_dto( + area_folder, + average_unsupplied_energy_cost=thermal_areas.unserverd_energy_cost.get(area_id, 0.0), + average_spilled_energy_cost=thermal_areas.spilled_energy_cost.get(area_id, 0.0), + ) + + return area_map + def get_all_areas(self, study: RawStudy, area_type: t.Optional[AreaType] = None) -> t.List[AreaInfoDTO]: """ Retrieves all areas and districts of a raw study based on the area type. From dde6df99df6df55723f0186499314f9edec5af63 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Fri, 19 Jan 2024 13:35:08 +0100 Subject: [PATCH 079/147] feat(tablemode): improve and simplify the `TableModeManager` class --- antarest/study/business/area_management.py | 2 +- .../business/areas/renewable_management.py | 2 +- .../business/areas/st_storage_management.py | 2 +- .../business/areas/thermal_management.py | 2 +- .../study/business/table_mode_management.py | 180 ++++++---- antarest/study/service.py | 9 +- antarest/study/web/study_data_blueprint.py | 27 +- .../study_data_blueprint/test_table_mode.py | 323 +++++++++++++++++- .../areas/test_st_storage_management.py | 4 +- 9 files changed, 458 insertions(+), 93 deletions(-) diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py index 2c4fc0dfa9..b03f0cdae9 100644 --- a/antarest/study/business/area_management.py +++ b/antarest/study/business/area_management.py @@ -207,7 +207,7 @@ def get_all_area_props(self, study: RawStudy) -> t.Mapping[str, GetAreaDTO]: """ file_study = self.storage_service.get_storage(study).get_raw(study) - # Get the area information from the `/input/areas` file. + # Get the area information from the `/input/areas/` file. 
path = _ALL_AREAS_PATH try: areas_cfg = file_study.tree.get(path.split("/"), depth=5) diff --git a/antarest/study/business/areas/renewable_management.py b/antarest/study/business/areas/renewable_management.py index 4d51827c7a..17f2b33c55 100644 --- a/antarest/study/business/areas/renewable_management.py +++ b/antarest/study/business/areas/renewable_management.py @@ -146,7 +146,7 @@ def get_clusters(self, study: Study, area_id: str) -> t.Sequence[RenewableCluste return [create_renewable_output(study.version, cluster_id, cluster) for cluster_id, cluster in clusters.items()] - def get_all_clusters( + def get_all_renewable_props( self, study: Study, ) -> t.Mapping[str, t.Sequence[RenewableClusterOutput]]: diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py index a54c583763..6ace7a760f 100644 --- a/antarest/study/business/areas/st_storage_management.py +++ b/antarest/study/business/areas/st_storage_management.py @@ -328,7 +328,7 @@ def get_storages( storages = [create_storage_output(study_version, storage_id, options) for storage_id, options in config.items()] return sorted(storages, key=order_by) - def get_all_storages( + def get_all_storage_props( self, study: Study, ) -> t.Mapping[str, t.Sequence[STStorageOutput]]: diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index 73b7431742..7e6eec9256 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -187,7 +187,7 @@ def get_clusters( study_version = study.version return [create_thermal_output(study_version, cluster_id, cluster) for cluster_id, cluster in clusters.items()] - def get_all_clusters( + def get_all_thermal_props( self, study: Study, ) -> t.Mapping[str, t.Sequence[ThermalClusterOutput]]: diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index d96c70c269..905b630786 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -1,16 +1,22 @@ -from typing import Any, Dict, List, Optional, TypedDict, Union, cast +import typing as t + +import pandas as pd from pydantic import Field -from antarest.study.business.areas.properties_management import AdequacyPatchMode -from antarest.study.business.areas.renewable_management import TimeSeriesInterpretation + +from antarest.study.business.area_management import AreaManager +from antarest.study.business.areas.renewable_management import RenewableManager, TimeSeriesInterpretation +from antarest.study.business.areas.st_storage_management import STStorageManager +from antarest.study.business.areas.thermal_management import ThermalManager from antarest.study.business.binding_constraint_management import BindingConstraintManager from antarest.study.business.enum_ignore_case import EnumIgnoreCase -from antarest.study.business.utils import FormFieldsBaseModel, execute_or_add_commands, AllOptionalMetaclass +from antarest.study.business.link_management import LinkManager +from antarest.study.business.utils import AllOptionalMetaclass, FormFieldsBaseModel, execute_or_add_commands from antarest.study.common.default_values import FilteringOptions, LinkProperties, NodalOptimization from antarest.study.model import RawStudy +from antarest.study.storage.rawstudy.model.filesystem.config.area import AdequacyPatchMode from 
antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency from antarest.study.storage.rawstudy.model.filesystem.config.thermal import LawOption, LocalTSGenerationBehavior from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.storage_service import StudyStorageService from antarest.study.storage.variantstudy.model.command.icommand import ICommand from antarest.study.storage.variantstudy.model.command.update_binding_constraint import UpdateBindingConstraint from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig @@ -31,6 +37,7 @@ class TableTemplateType(EnumIgnoreCase): LINK = "link" THERMAL_CLUSTER = "thermal cluster" RENEWABLE_CLUSTER = "renewable cluster" + ST_STORAGE = "short-term storage" BINDING_CONSTRAINT = "binding constraint" @@ -187,7 +194,7 @@ class ThermalClusterColumns(FormFieldsBaseModel, metaclass=AllOptionalMetaclass) path=f"{THERMAL_CLUSTER_PATH}/spread-cost", ) ts_gen: LocalTSGenerationBehavior = Field( - default=LocalTSGenerationBehavior.USE_GLOBAL_PARAMETER.value, + default=LocalTSGenerationBehavior.USE_GLOBAL.value, path=f"{THERMAL_CLUSTER_PATH}/gen-ts", ) volatility_forced: int = Field( @@ -285,18 +292,18 @@ class BindingConstraintColumns(FormFieldsBaseModel, metaclass=AllOptionalMetacla default=True, path=f"{BINDING_CONSTRAINT_PATH}/enabled", ) - group: Optional[str] = Field( + group: t.Optional[str] = Field( default="default", path=f"{BINDING_CONSTRAINT_PATH}/group", ) -class ColumnInfo(TypedDict): +class ColumnInfo(t.TypedDict): path: str - default_value: Any + default_value: t.Any -class PathVars(TypedDict, total=False): +class PathVars(t.TypedDict, total=False): # Area id: str # Link @@ -307,17 +314,7 @@ class PathVars(TypedDict, total=False): cluster: str -AREA_PATH = "input/areas/{area}" -THERMAL_PATH = "input/thermal/areas" -LINK_GLOB_PATH = "input/links/{area1}/properties" -LINK_PATH = f"{LINK_GLOB_PATH}/{{area2}}" -CLUSTER_GLOB_PATH = "input/thermal/clusters/{area}/list" -CLUSTER_PATH = f"{CLUSTER_GLOB_PATH}/{{cluster}}" -RENEWABLE_GLOB_PATH = "input/renewables/clusters/{area}/list" -RENEWABLE_PATH = f"{RENEWABLE_GLOB_PATH}/{{cluster}}" -BINDING_CONSTRAINT_PATH = "input/bindingconstraints/bindingconstraints" - -FIELDS_INFO_BY_TYPE: Dict[TableTemplateType, Dict[str, ColumnInfo]] = { +FIELDS_INFO_BY_TYPE: t.Dict[TableTemplateType, t.Dict[str, ColumnInfo]] = { TableTemplateType.AREA: { "non_dispatchable_power": { "path": f"{AREA_PATH}/optimization/nodal optimization/non-dispatchable-power", @@ -402,107 +399,107 @@ class PathVars(TypedDict, total=False): "default_value": FilteringOptions.FILTER_YEAR_BY_YEAR, }, }, - TableTemplateType.CLUSTER: { + TableTemplateType.THERMAL_CLUSTER: { "group": { - "path": f"{CLUSTER_PATH}/group", + "path": f"{THERMAL_CLUSTER_PATH}/group", "default_value": "", }, "enabled": { - "path": f"{CLUSTER_PATH}/enabled", + "path": f"{THERMAL_CLUSTER_PATH}/enabled", "default_value": True, }, "must_run": { - "path": f"{CLUSTER_PATH}/must-run", + "path": f"{THERMAL_CLUSTER_PATH}/must-run", "default_value": False, }, "unit_count": { - "path": f"{CLUSTER_PATH}/unitcount", + "path": f"{THERMAL_CLUSTER_PATH}/unitcount", "default_value": 0, }, "nominal_capacity": { - "path": f"{CLUSTER_PATH}/nominalcapacity", + "path": f"{THERMAL_CLUSTER_PATH}/nominalcapacity", "default_value": 0, }, "min_stable_power": { - "path": f"{CLUSTER_PATH}/min-stable-power", + "path": f"{THERMAL_CLUSTER_PATH}/min-stable-power", 
"default_value": 0, }, "spinning": { - "path": f"{CLUSTER_PATH}/spinning", + "path": f"{THERMAL_CLUSTER_PATH}/spinning", "default_value": 0, }, "min_up_time": { - "path": f"{CLUSTER_PATH}/min-up-time", + "path": f"{THERMAL_CLUSTER_PATH}/min-up-time", "default_value": 1, }, "min_down_time": { - "path": f"{CLUSTER_PATH}/min-down-time", + "path": f"{THERMAL_CLUSTER_PATH}/min-down-time", "default_value": 1, }, "co2": { - "path": f"{CLUSTER_PATH}/co2", + "path": f"{THERMAL_CLUSTER_PATH}/co2", "default_value": 0, }, "marginal_cost": { - "path": f"{CLUSTER_PATH}/marginal-cost", + "path": f"{THERMAL_CLUSTER_PATH}/marginal-cost", "default_value": 0, }, "fixed_cost": { - "path": f"{CLUSTER_PATH}/fixed-cost", + "path": f"{THERMAL_CLUSTER_PATH}/fixed-cost", "default_value": 0, }, "startup_cost": { - "path": f"{CLUSTER_PATH}/startup-cost", + "path": f"{THERMAL_CLUSTER_PATH}/startup-cost", "default_value": 0, }, "market_bid_cost": { - "path": f"{CLUSTER_PATH}/market-bid-cost", + "path": f"{THERMAL_CLUSTER_PATH}/market-bid-cost", "default_value": 0, }, "spread_cost": { - "path": f"{CLUSTER_PATH}/spread-cost", + "path": f"{THERMAL_CLUSTER_PATH}/spread-cost", "default_value": 0, }, "ts_gen": { - "path": f"{CLUSTER_PATH}/gen-ts", + "path": f"{THERMAL_CLUSTER_PATH}/gen-ts", "default_value": LocalTSGenerationBehavior.USE_GLOBAL.value, }, "volatility_forced": { - "path": f"{CLUSTER_PATH}/volatility.forced", + "path": f"{THERMAL_CLUSTER_PATH}/volatility.forced", "default_value": 0, }, "volatility_planned": { - "path": f"{CLUSTER_PATH}/volatility.planned", + "path": f"{THERMAL_CLUSTER_PATH}/volatility.planned", "default_value": 0, }, "law_forced": { - "path": f"{CLUSTER_PATH}/law.forced", + "path": f"{THERMAL_CLUSTER_PATH}/law.forced", "default_value": LawOption.UNIFORM.value, }, "law_planned": { - "path": f"{CLUSTER_PATH}/law.planned", + "path": f"{THERMAL_CLUSTER_PATH}/law.planned", "default_value": LawOption.UNIFORM.value, }, }, - TableTemplateType.RENEWABLE: { + TableTemplateType.RENEWABLE_CLUSTER: { "group": { - "path": f"{RENEWABLE_PATH}/group", + "path": f"{RENEWABLE_CLUSTER_PATH}/group", "default_value": "", }, "ts_interpretation": { - "path": f"{RENEWABLE_PATH}/ts-interpretation", + "path": f"{RENEWABLE_CLUSTER_PATH}/ts-interpretation", "default_value": TimeSeriesInterpretation.POWER_GENERATION.value, }, "enabled": { - "path": f"{RENEWABLE_PATH}/enabled", + "path": f"{RENEWABLE_CLUSTER_PATH}/enabled", "default_value": True, }, "unit_count": { - "path": f"{RENEWABLE_PATH}/unitcount", + "path": f"{RENEWABLE_CLUSTER_PATH}/unitcount", "default_value": 0, }, "nominal_capacity": { - "path": f"{RENEWABLE_PATH}/nominalcapacity", + "path": f"{RENEWABLE_CLUSTER_PATH}/nominalcapacity", "default_value": 0, }, }, @@ -534,7 +531,7 @@ class PathVars(TypedDict, total=False): TableTemplateType.BINDING_CONSTRAINT: BindingConstraintColumns, } -ColumnsModelTypes = Union[ +ColumnsModelTypes = t.Union[ AreaColumns, LinkColumns, ThermalClusterColumns, @@ -543,7 +540,7 @@ class PathVars(TypedDict, total=False): ] -def _get_glob_object(file_study: FileStudy, table_type: TableTemplateType) -> Dict[str, Any]: +def _get_glob_object(file_study: FileStudy, table_type: TableTemplateType) -> t.Dict[str, t.Any]: """ Retrieves the fields of an object according to its type (area, link, thermal cluster...). 
@@ -559,7 +556,7 @@ def _get_glob_object(file_study: FileStudy, table_type: TableTemplateType) -> Di """ # sourcery skip: extract-method if table_type == TableTemplateType.AREA: - info_map: Dict[str, Any] = file_study.tree.get(url=AREA_PATH.format(area="*").split("/"), depth=3) + info_map: t.Dict[str, t.Any] = file_study.tree.get(url=AREA_PATH.format(area="*").split("/"), depth=3) area_ids = list(file_study.config.areas) # If there is only one ID in the `area_ids`, the result returned from # the `file_study.tree.get` call will be a single object. @@ -588,7 +585,7 @@ def _get_glob_object(file_study: FileStudy, table_type: TableTemplateType) -> Di return {} -def _get_value(path: List[str], data: Dict[str, Any], default_value: Any) -> Any: +def _get_value(path: t.List[str], data: t.Dict[str, t.Any], default_value: t.Any) -> t.Any: if len(path): return _get_value(path[1:], data.get(path[0], {}), default_value) return data if data != {} else default_value @@ -597,7 +594,7 @@ def _get_value(path: List[str], data: Dict[str, Any], default_value: Any) -> Any def _get_relative_path( table_type: TableTemplateType, path: str, -) -> List[str]: +) -> t.List[str]: base_path = "" path_arr = path.split("/") @@ -626,7 +623,7 @@ def _get_column_path( path_vars: PathVars, ) -> str: columns_model = COLUMNS_MODELS_BY_TYPE[table_type] - path = cast(str, columns_model.__fields__[column].field_info.extra["path"]) + path = t.cast(str, columns_model.__fields__[column].field_info.extra["path"]) if table_type == TableTemplateType.AREA: return path.format(area=path_vars["id"]) @@ -663,22 +660,81 @@ def _get_path_vars_from_key( return PathVars() +_TableIndex = str # row name +_TableColumn = str # column name +_CellValue = t.Any # cell value (str, int, float, bool, enum, etc.) +TableDataDTO = t.Mapping[_TableIndex, t.Mapping[_TableColumn, _CellValue]] + + class TableModeManager: - def __init__(self, storage_service: StudyStorageService) -> None: - self.storage_service = storage_service + def __init__( + self, + area_manager: AreaManager, + link_manager: LinkManager, + thermal_manager: ThermalManager, + renewable_manager: RenewableManager, + st_storage_manager: STStorageManager, + binding_constraint_manager: BindingConstraintManager, + ) -> None: + self._area_manager = area_manager + self._link_manager = link_manager + self._thermal_manager = thermal_manager + self._renewable_manager = renewable_manager + self._st_storage_manager = st_storage_manager + self._binding_constraint_manager = binding_constraint_manager def get_table_data( self, study: RawStudy, table_type: TableTemplateType, - columns: List[str], - ) -> Dict[str, ColumnsModelTypes]: + columns: t.Sequence[_TableColumn], + ) -> TableDataDTO: + if table_type == TableTemplateType.AREA: + areas_map = self._area_manager.get_all_area_props(study) + data = {area_id: area.dict(by_alias=True) for area_id, area in areas_map.items()} + elif table_type == TableTemplateType.LINK: + pass + elif table_type == TableTemplateType.THERMAL_CLUSTER: + clusters_map = self._thermal_manager.get_all_thermal_props(study) + data = { + f"{area_id} / {cluster.id}": cluster.dict(by_alias=True) + for area_id, clusters in clusters_map.items() + for cluster in clusters + } + elif table_type == TableTemplateType.RENEWABLE_CLUSTER: + clusters_map = self._renewable_manager.get_all_renewable_props(study) + data = { + f"{area_id} / {cluster.id}": cluster.dict(by_alias=True) + for area_id, clusters in clusters_map.items() + for cluster in clusters + } + elif table_type == 
TableTemplateType.ST_STORAGE: + storage_map = self._st_storage_manager.get_all_storage_props(study) + data = { + f"{area_id} / {storage.id}": storage.dict(by_alias=True) + for area_id, storages in storage_map.items() + for storage in storages + } + elif table_type == TableTemplateType.BINDING_CONSTRAINT: + pass + + df = pd.DataFrame.from_dict(data, orient="index") + if columns: + # Create a new dataframe with the listed columns. + # If a column does not exist in the DataFrame, it is created with empty values, + # because NaN (or `None`) is not JSON-serializable. + df = pd.DataFrame(df, columns=columns) + df = df.where(pd.notna(df), other="") + + obj = df.to_dict(orient="index") + return obj + file_study = self.storage_service.get_storage(study).get_raw(study) columns_model = COLUMNS_MODELS_BY_TYPE[table_type] glob_object = _get_glob_object(file_study, table_type) schema_columns = columns_model.schema()["properties"] - def get_column_value(col: str, data: Dict[str, Any]) -> Any: + def get_column_value(col: str, data: t.Dict[str, t.Any]) -> t.Any: schema = schema_columns[col] relative_path = _get_relative_path(table_type, schema["path"]) return _get_value(relative_path, data, schema["default"]) @@ -695,7 +751,7 @@ def get_column_value(col: str, data: Dict[str, Any]) -> Any: for data in glob_object.values() } - obj: Dict[str, Any] = {} + obj: t.Dict[str, t.Any] = {} for id_1, value_1 in glob_object.items(): for id_2, value_2 in value_1.items(): obj[f"{id_1} / {id_2}"] = columns_model.construct( @@ -708,9 +764,9 @@ def set_table_data( self, study: RawStudy, table_type: TableTemplateType, - data: Dict[str, ColumnsModelTypes], + data: t.Dict[str, ColumnsModelTypes], ) -> None: - commands: List[ICommand] = [] + commands: t.List[ICommand] = [] bindings_by_id = None command_context = self.storage_service.variant_study_service.command_factory.command_context diff --git a/antarest/study/service.py b/antarest/study/service.py index 8e6ffa37ea..5bbdca35e2 100644 --- a/antarest/study/service.py +++ b/antarest/study/service.py @@ -276,13 +276,20 @@ def __init__( self.thermal_manager = ThermalManager(self.storage_service) self.st_storage_manager = STStorageManager(self.storage_service) self.ts_config_manager = TimeSeriesConfigManager(self.storage_service) - self.table_mode_manager = TableModeManager(self.storage_service) self.playlist_manager = PlaylistManager(self.storage_service) self.scenario_builder_manager = ScenarioBuilderManager(self.storage_service) self.xpansion_manager = XpansionManager(self.storage_service) self.matrix_manager = MatrixManager(self.storage_service) self.binding_constraint_manager = BindingConstraintManager(self.storage_service) self.correlation_manager = CorrelationManager(self.storage_service) + self.table_mode_manager = TableModeManager( + self.areas, + self.links, + self.thermal_manager, + self.renewable_manager, + self.st_storage_manager, + self.binding_constraint_manager, + ) self.cache_service = cache_service self.config = config self.on_deletion_callbacks: t.List[t.Callable[[str], None]] = [] diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index 5a2ebc347a..2133d9819f 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -54,18 +54,15 @@ from antarest.study.business.link_management import LinkInfoDTO from antarest.study.business.optimization_management import OptimizationFormFields from antarest.study.business.playlist_management import PlaylistColumns -from 
antarest.study.business.table_mode_management import ( - BindingConstraintOperator, - ColumnsModelTypes, - TableTemplateType, -) +from antarest.study.business.table_mode_management import TableDataDTO, TableTemplateType from antarest.study.business.thematic_trimming_field_infos import ThematicTrimmingFormFields from antarest.study.business.timeseries_config_management import TSFormFields from antarest.study.model import PatchArea, PatchCluster from antarest.study.service import StudyService from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency -from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI +from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id +from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator logger = logging.getLogger(__name__) @@ -845,24 +842,22 @@ def set_timeseries_form_values( path="/studies/{uuid}/tablemode", tags=[APITag.study_data], summary="Get table data for table form", - # `Any` because `Union[AreaColumns, LinkColumns]` not working - response_model=t.Dict[str, t.Dict[str, t.Any]], - response_model_exclude_none=True, ) def get_table_mode( uuid: str, table_type: TableTemplateType, columns: str, current_user: JWTUser = Depends(auth.get_current_user), - ) -> t.Dict[str, ColumnsModelTypes]: + ) -> TableDataDTO: logger.info( f"Getting template table data for study {uuid}", extra={"user": current_user.id}, ) params = RequestParameters(user=current_user) study = study_service.check_study_access(uuid, StudyPermissionType.READ, params) - - return study_service.table_mode_manager.get_table_data(study, table_type, columns.split(",")) + column_list = columns.split(",") if columns else [] + table_data = study_service.table_mode_manager.get_table_data(study, table_type, column_list) + return table_data @bp.put( path="/studies/{uuid}/tablemode", @@ -872,17 +867,17 @@ def get_table_mode( def set_table_mode( uuid: str, table_type: TableTemplateType, - data: t.Dict[str, ColumnsModelTypes], + data: TableDataDTO, current_user: JWTUser = Depends(auth.get_current_user), - ) -> None: + ) -> TableDataDTO: logger.info( f"Updating table data for study {uuid}", extra={"user": current_user.id}, ) params = RequestParameters(user=current_user) study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, params) - - study_service.table_mode_manager.set_table_data(study, table_type, data) + table_data = study_service.table_mode_manager.set_table_data(study, table_type, data) + return table_data @bp.post( "/studies/_update_version", diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index 61bede52ce..ca0b51954e 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -1,14 +1,11 @@ -import re - -import numpy as np import pytest from starlette.testclient import TestClient from antarest.core.tasks.model import TaskStatus -from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from tests.integration.utils import wait_task_completion +# noinspection SpellCheckingInspection @pytest.mark.unit_test class TestTableMode: """ @@ -24,40 +21,350 @@ def test_lifecycle__nominal( user_access_token: str, study_id: str, ) -> None: - # we are 
working with the "DE" area - area_id = "de" user_headers = {"Authorization": f"Bearer {user_access_token}"} + # In order to test the table mode for renewable clusters and short-term storage, + # it is required that the study is either in version 8.1 for renewable energies + # or in version 8.6 for short-term storage and that the renewable clusters are enabled + # in the study configuration. + + # Upgrade the study to version 8.6 + res = client.put( + f"/v1/studies/{study_id}/upgrade", + headers={"Authorization": f"Bearer {user_access_token}"}, + params={"target_version": 860}, + ) + assert res.status_code == 200, res.json() + + task_id = res.json() + task = wait_task_completion(client, user_access_token, task_id) + assert task.status == TaskStatus.COMPLETED, task + + # Parameter 'renewable-generation-modelling' must be set to 'clusters' instead of 'aggregated'. + # The `enr_modelling` value must be set to "clusters" instead of "aggregated" + args = { + "target": "settings/generaldata/other preferences", + "data": {"renewable-generation-modelling": "clusters"}, + } + res = client.post( + f"/v1/studies/{study_id}/commands", + headers={"Authorization": f"Bearer {user_access_token}"}, + json=[{"action": "update_config", "args": args}], + ) + assert res.status_code == 200, res.json() + # Table Mode - Area + # ================= + res = client.get( - f"/v1/studies/{study_id}/tablemode/form", + f"/v1/studies/{study_id}/tablemode", headers=user_headers, params={ "table_type": "area", - "columns": ",".join(["nonDispatchablePower", "dispatchableHydroPower", "otherDispatchablePower"]), + "columns": ",".join( + [ + "nonDispatchablePower", + "dispatchableHydroPower", + "otherDispatchablePower", + "averageUnsuppliedEnergyCost", + "spreadUnsuppliedEnergyCost", + "averageSpilledEnergyCost", + "spreadSpilledEnergyCost", + "filterSynthesis", + "filterYearByYear", + "adequacyPatchMode", + ] + ), }, ) assert res.status_code == 200, res.json() expected = { "de": { + "adequacyPatchMode": "outside", + "averageSpilledEnergyCost": 0, + "averageUnsuppliedEnergyCost": 3000, "dispatchableHydroPower": True, + "filterSynthesis": "daily, monthly", + "filterYearByYear": "hourly, weekly, annual", "nonDispatchablePower": True, "otherDispatchablePower": True, + "spreadSpilledEnergyCost": 0, + "spreadUnsuppliedEnergyCost": 0, }, "es": { + "adequacyPatchMode": "outside", + "averageSpilledEnergyCost": 0, + "averageUnsuppliedEnergyCost": 3000, "dispatchableHydroPower": True, + "filterSynthesis": "daily, monthly", + "filterYearByYear": "hourly, weekly, annual", "nonDispatchablePower": True, "otherDispatchablePower": True, + "spreadSpilledEnergyCost": 0, + "spreadUnsuppliedEnergyCost": 0, }, "fr": { + "adequacyPatchMode": "outside", + "averageSpilledEnergyCost": 0, + "averageUnsuppliedEnergyCost": 3000, "dispatchableHydroPower": True, + "filterSynthesis": "", + "filterYearByYear": "hourly", "nonDispatchablePower": True, "otherDispatchablePower": True, + "spreadSpilledEnergyCost": 0, + "spreadUnsuppliedEnergyCost": 0, }, "it": { + "adequacyPatchMode": "outside", + "averageSpilledEnergyCost": 0, + "averageUnsuppliedEnergyCost": 3000, "dispatchableHydroPower": True, + "filterSynthesis": "", + "filterYearByYear": "hourly", "nonDispatchablePower": True, "otherDispatchablePower": True, + "spreadSpilledEnergyCost": 0, + "spreadUnsuppliedEnergyCost": 0, + }, + } + actual = res.json() + assert actual == expected + + # Table Mode - Thermal Clusters + # ============================= + + res = client.get( + 
f"/v1/studies/{study_id}/tablemode", + headers=user_headers, + params={ + "table_type": "thermal cluster", + "columns": ",".join(["group", "unitCount", "nominalCapacity", "so2"]), + }, + ) + assert res.status_code == 200, res.json() + expected = { + "de / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "de / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "de / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "de / 04_res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "de / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "de / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "de / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "de / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "de / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "es / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "es / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "es / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "es / 04_res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "es / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "es / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "es / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "es / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "es / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "fr / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "fr / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "fr / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "fr / 04_res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "fr / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "fr / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "fr / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "fr / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "fr / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "it / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "it / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "it / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "it / 04_res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "it / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "it / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "it / 
07_gas": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "it / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "it / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + } + actual = res.json() + assert actual == expected + + # Table Mode - Renewable Clusters + # =============================== + + # Prepare data for renewable clusters tests + generators_by_country = { + "fr": { + "La Rochelle": { + "name": "La Rochelle", + "group": "solar pv", + "nominalCapacity": 2.1, + "unitCount": 1, + "tsInterpretation": "production-factor", + }, + "Oleron": { + "name": "Oleron", + "group": "wind offshore", + "nominalCapacity": 15, + "unitCount": 70, + "tsInterpretation": "production-factor", + }, + "Dieppe": { + "name": "Dieppe", + "group": "wind offshore", + "nominalCapacity": 8, + "unitCount": 62, + "tsInterpretation": "power-generation", + }, + }, + "it": { + "Sicile": { + "name": "Sicile", + "group": "solar pv", + "nominalCapacity": 1.8, + "unitCount": 1, + "tsInterpretation": "production-factor", + }, + "Sardaigne": { + "name": "Sardaigne", + "group": "wind offshore", + "nominalCapacity": 12, + "unitCount": 86, + "tsInterpretation": "power-generation", + }, + "Pouilles": { + "name": "Pouilles", + "enabled": False, + "group": "wind offshore", + "nominalCapacity": 11, + "unitCount": 40, + "tsInterpretation": "power-generation", + }, + }, + } + + for area_id, generators in generators_by_country.items(): + for generator_id, generator in generators.items(): + res = client.post( + f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + headers=user_headers, + json=generator, + ) + res.raise_for_status() + + res = client.get( + f"/v1/studies/{study_id}/tablemode", + headers=user_headers, + params={ + "table_type": "renewable cluster", + "columns": ",".join(["group", "enabled", "unitCount", "nominalCapacity"]), + }, + ) + assert res.status_code == 200, res.json() + expected = { + "fr / Dieppe": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 8, "unitCount": 62}, + "fr / La Rochelle": {"enabled": True, "group": "Solar PV", "nominalCapacity": 2.1, "unitCount": 1}, + "fr / Oleron": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 15, "unitCount": 70}, + "it / Pouilles": {"enabled": False, "group": "Wind Offshore", "nominalCapacity": 11, "unitCount": 40}, + "it / Sardaigne": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 12, "unitCount": 86}, + "it / Sicile": {"enabled": True, "group": "Solar PV", "nominalCapacity": 1.8, "unitCount": 1}, + } + actual = res.json() + assert actual == expected + + # Table Mode - Short Term Storage + # =============================== + + # Prepare data for short-term storage tests + storage_by_country = { + "fr": { + "siemens": { + "name": "Siemens", + "group": "battery", + "injectionNominalCapacity": 1500, + "withdrawalNominalCapacity": 1500, + "reservoirCapacity": 1500, + "initialLevel": 0.5, + "initialLevelOptim": False, + }, + "tesla": { + "name": "Tesla", + "group": "battery", + "injectionNominalCapacity": 1200, + "withdrawalNominalCapacity": 1200, + "reservoirCapacity": 1200, + "initialLevelOptim": True, + }, + }, + "it": { + "storage3": { + "name": "storage3", + "group": "psp_open", + "injectionNominalCapacity": 1234, + "withdrawalNominalCapacity": 1020, + "reservoirCapacity": 1357, + "initialLevel": 1, + "initialLevelOptim": False, + }, + "storage4": { + "name": "storage4", + "group": 
"psp_open", + "injectionNominalCapacity": 567, + "withdrawalNominalCapacity": 456, + "reservoirCapacity": 500, + "initialLevelOptim": True, + }, + }, + } + for area_id, storages in storage_by_country.items(): + for storage_id, storage in storages.items(): + res = client.post( + f"/v1/studies/{study_id}/areas/{area_id}/storages", + headers=user_headers, + json=storage, + ) + res.raise_for_status() + + res = client.get( + f"/v1/studies/{study_id}/tablemode", + headers=user_headers, + params={ + "table_type": "short-term storage", + "columns": ",".join( + [ + "group", + "injectionNominalCapacity", + "withdrawalNominalCapacity", + "reservoirCapacity", + "unknowColumn", + ] + ), + }, + ) + assert res.status_code == 200, res.json() + expected = { + "fr / siemens": { + "group": "Battery", + "injectionNominalCapacity": 1500, + "reservoirCapacity": 1500, + "withdrawalNominalCapacity": 1500, + "unknowColumn": "", + }, + "fr / tesla": { + "group": "Battery", + "injectionNominalCapacity": 1200, + "reservoirCapacity": 1200, + "withdrawalNominalCapacity": 1200, + "unknowColumn": "", + }, + "it / storage3": { + "group": "PSP_open", + "injectionNominalCapacity": 1234, + "reservoirCapacity": 1357, + "withdrawalNominalCapacity": 1020, + "unknowColumn": "", + }, + "it / storage4": { + "group": "PSP_open", + "injectionNominalCapacity": 567, + "reservoirCapacity": 500, + "withdrawalNominalCapacity": 456, + "unknowColumn": "", }, } actual = res.json() diff --git a/tests/study/business/areas/test_st_storage_management.py b/tests/study/business/areas/test_st_storage_management.py index f81171831b..5db040716d 100644 --- a/tests/study/business/areas/test_st_storage_management.py +++ b/tests/study/business/areas/test_st_storage_management.py @@ -131,7 +131,7 @@ def test_get_all_storages__nominal_case( manager = STStorageManager(study_storage_service) # run - all_storages = manager.get_all_storages(study) + all_storages = manager.get_all_storage_props(study) # Check actual = {area_id: [form.dict(by_alias=True) for form in forms] for area_id, forms in all_storages.items()} @@ -204,7 +204,7 @@ def test_get_all_storages__config_not_found( # run with pytest.raises(STStorageConfigNotFound, match="not found"): - manager.get_all_storages(study) + manager.get_all_storage_props(study) def test_get_st_storages__nominal_case( self, From cd681d753e24eae7b919fd5833ed28d98a3cfa1d Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Wed, 7 Feb 2024 10:56:49 +0100 Subject: [PATCH 080/147] style(link): use `t` alias to import `typing` --- antarest/study/business/link_management.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/antarest/study/business/link_management.py b/antarest/study/business/link_management.py index 971b0ca376..38358b052b 100644 --- a/antarest/study/business/link_management.py +++ b/antarest/study/business/link_management.py @@ -1,9 +1,9 @@ -from typing import Any, Dict, List, Optional +import typing as t from pydantic import BaseModel from antarest.study.business.utils import execute_or_add_commands -from antarest.study.model import Study +from antarest.study.model import RawStudy from antarest.study.storage.storage_service import StudyStorageService from antarest.study.storage.variantstudy.model.command.create_link import CreateLink from antarest.study.storage.variantstudy.model.command.remove_link import RemoveLink @@ -18,7 +18,7 @@ class LinkUIDTO(BaseModel): class LinkInfoDTO(BaseModel): area1: str area2: str - ui: Optional[LinkUIDTO] = None + ui: 
t.Optional[LinkUIDTO] = None class GenericElement(BaseModel): @@ -28,27 +28,27 @@ class GenericElement(BaseModel): class GenericItem(BaseModel): element: GenericElement - item_list: List[GenericElement] + item_list: t.List[GenericElement] class AllCLustersAndLinks(BaseModel): - links: List[GenericItem] - clusters: List[GenericItem] + links: t.List[GenericItem] + clusters: t.List[GenericItem] class LinkManager: def __init__(self, storage_service: StudyStorageService) -> None: self.storage_service = storage_service - def get_all_links(self, study: Study, with_ui: bool = False) -> List[LinkInfoDTO]: + def get_all_links(self, study: RawStudy, with_ui: bool = False) -> t.List[LinkInfoDTO]: file_study = self.storage_service.get_storage(study).get_raw(study) result = [] for area_id, area in file_study.config.areas.items(): - links_config: Optional[Dict[str, Any]] = None + links_config: t.Optional[t.Dict[str, t.Any]] = None if with_ui: links_config = file_study.tree.get(["input", "links", area_id, "properties"]) for link in area.links: - ui_info: Optional[LinkUIDTO] = None + ui_info: t.Optional[LinkUIDTO] = None if with_ui and links_config and link in links_config: ui_info = LinkUIDTO( color=f"{links_config[link].get('colorr', '163')},{links_config[link].get('colorg', '163')},{links_config[link].get('colorb', '163')}", @@ -59,7 +59,7 @@ def get_all_links(self, study: Study, with_ui: bool = False) -> List[LinkInfoDTO return result - def create_link(self, study: Study, link_creation_info: LinkInfoDTO) -> LinkInfoDTO: + def create_link(self, study: RawStudy, link_creation_info: LinkInfoDTO) -> LinkInfoDTO: storage_service = self.storage_service.get_storage(study) file_study = storage_service.get_raw(study) command = CreateLink( @@ -73,7 +73,7 @@ def create_link(self, study: Study, link_creation_info: LinkInfoDTO) -> LinkInfo area2=link_creation_info.area2, ) - def delete_link(self, study: Study, area1_id: str, area2_id: str) -> None: + def delete_link(self, study: RawStudy, area1_id: str, area2_id: str) -> None: file_study = self.storage_service.get_storage(study).get_raw(study) command = RemoveLink( area1=area1_id, From f65a7210af9e9489d504a89113e15927426739f8 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Wed, 7 Feb 2024 11:17:05 +0100 Subject: [PATCH 081/147] refactor(config): move the `IniProperties` in a specific module for reuse --- .../rawstudy/model/filesystem/config/area.py | 70 +++---------------- .../model/filesystem/config/ini_properties.py | 52 ++++++++++++++ 2 files changed, 63 insertions(+), 59 deletions(-) create mode 100644 antarest/study/storage/rawstudy/model/filesystem/config/ini_properties.py diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/area.py b/antarest/study/storage/rawstudy/model/filesystem/config/area.py index c62e31436a..acce122201 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/area.py @@ -1,66 +1,18 @@ """ Object model used to read and update area configuration. """ -import json import re import typing as t import typing_extensions as te -from pydantic import BaseModel, Extra, Field, root_validator, validator +from pydantic import Field, root_validator, validator from antarest.study.business.enum_ignore_case import EnumIgnoreCase - - -class Properties( - BaseModel, - # On reading, if the configuration contains an extra field, it is ignored. 
- # This allows to read configurations that contain extra fields - # that are not yet managed by the code or that are deprecated. - extra=Extra.ignore, - # If a field is updated on assignment, it is also validated. - validate_assignment=True, - # On testing, we can use snake_case for field names. - allow_population_by_field_name=True, -): - """ - Base class for configuration sections. - """ - - def to_config(self) -> t.Mapping[str, t.Any]: - """ - Convert the object to a dictionary for writing to a configuration file (`*.ini`). - - Returns: - A dictionary with the configuration values. - """ - - config = {} - for field_name, field in self.__fields__.items(): - value = getattr(self, field_name) - if value is None: - continue - if isinstance(value, Properties): - config[field.alias] = value.to_config() - else: - config[field.alias] = json.loads(json.dumps(value)) - return config - - @classmethod - def construct(cls, _fields_set: t.Optional[t.Set[str]] = None, **values: t.Any) -> "Properties": - """ - Construct a new model instance from a dict of values, replacing aliases with real field names. - """ - # The pydantic construct() function does not allow aliases to be handled. - aliases = {(field.alias or name): name for name, field in cls.__fields__.items()} - renamed_values = {aliases.get(k, k): v for k, v in values.items()} - if _fields_set is not None: - _fields_set = {aliases.get(f, f) for f in _fields_set} - # noinspection PyTypeChecker - return super().construct(_fields_set, **renamed_values) +from antarest.study.storage.rawstudy.model.filesystem.config.ini_properties import IniProperties # noinspection SpellCheckingInspection -class OptimizationProperties(Properties): +class OptimizationProperties(IniProperties): """ Object linked to `/input/areas//optimization.ini` information. @@ -115,7 +67,7 @@ class OptimizationProperties(Properties): 'spread-unsupplied-energy-cost': 1500.0}} """ - class FilteringSection(Properties): + class FilteringSection(IniProperties): """Configuration read from section `[filtering]` of `/input/areas//optimization.ini`.""" filter_synthesis: str = Field("hourly, daily, weekly, monthly, annual", alias="filter-synthesis") @@ -130,7 +82,7 @@ def _validate_filtering(cls, v: t.Any) -> str: raise TypeError(f"Invalid type for filtering: {type(v)}") # noinspection SpellCheckingInspection - class ModalOptimizationSection(Properties): + class ModalOptimizationSection(IniProperties): """Configuration read from section `[nodal optimization]` of `/input/areas//optimization.ini`.""" non_dispatchable_power: bool = Field(default=True, alias="non-dispatchable-power") @@ -161,14 +113,14 @@ class AdequacyPatchMode(EnumIgnoreCase): VIRTUAL = "virtual" -class AdequacyPathProperties(Properties): +class AdequacyPathProperties(IniProperties): """ Object linked to `/input/areas//adequacy_patch.ini` information. Only available if study version >= 830. """ - class AdequacyPathSection(Properties): + class AdequacyPathSection(IniProperties): """Configuration read from section `[adequacy-patch]` of `/input/areas//adequacy_patch.ini`.""" adequacy_patch_mode: AdequacyPatchMode = Field(default=AdequacyPatchMode.OUTSIDE, alias="adequacy-patch-mode") @@ -176,7 +128,7 @@ class AdequacyPathSection(Properties): adequacy_patch: AdequacyPathSection = Field(default_factory=AdequacyPathSection, alias="adequacy-patch") -class AreaUI(Properties): +class AreaUI(IniProperties): """ Style of an area in the map or in a layer. 
@@ -265,7 +217,7 @@ def to_config(self) -> t.Mapping[str, t.Any]: } -class UIProperties(Properties): +class UIProperties(IniProperties): """ Object linked to `/input/areas//ui.ini` information. @@ -441,7 +393,7 @@ def to_config(self) -> t.Mapping[str, t.Mapping[str, t.Any]]: return obj -class AreaFolder(Properties): +class AreaFolder(IniProperties): """ Object linked to `/input/areas/` information. @@ -535,7 +487,7 @@ class AreaFolder(Properties): # noinspection SpellCheckingInspection -class ThermalAreasProperties(Properties): +class ThermalAreasProperties(IniProperties): """ Object linked to `/input/thermal/areas.ini` information. diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/ini_properties.py b/antarest/study/storage/rawstudy/model/filesystem/config/ini_properties.py new file mode 100644 index 0000000000..8c113ce164 --- /dev/null +++ b/antarest/study/storage/rawstudy/model/filesystem/config/ini_properties.py @@ -0,0 +1,52 @@ +import json +import typing as t + +from pydantic import BaseModel, Extra + + +class IniProperties( + BaseModel, + # On reading, if the configuration contains an extra field, it is ignored. + # This allows to read configurations that contain extra fields + # that are not yet managed by the code or that are deprecated. + extra=Extra.ignore, + # If a field is updated on assignment, it is also validated. + validate_assignment=True, + # On testing, we can use snake_case for field names. + allow_population_by_field_name=True, +): + """ + Base class for configuration sections. + """ + + def to_config(self) -> t.Mapping[str, t.Any]: + """ + Convert the object to a dictionary for writing to a configuration file (`*.ini`). + + Returns: + A dictionary with the configuration values. + """ + + config = {} + for field_name, field in self.__fields__.items(): + value = getattr(self, field_name) + if value is None: + continue + if isinstance(value, IniProperties): + config[field.alias] = value.to_config() + else: + config[field.alias] = json.loads(json.dumps(value)) + return config + + @classmethod + def construct(cls, _fields_set: t.Optional[t.Set[str]] = None, **values: t.Any) -> "IniProperties": + """ + Construct a new model instance from a dict of values, replacing aliases with real field names. + """ + # The pydantic construct() function does not allow aliases to be handled. 
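+        # Build a mapping from field alias to real field name, so that values keyed by alias
+        # are renamed before being passed to the parent `construct()` method.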
+ aliases = {(field.alias or name): name for name, field in cls.__fields__.items()} + renamed_values = {aliases.get(k, k): v for k, v in values.items()} + if _fields_set is not None: + _fields_set = {aliases.get(f, f) for f in _fields_set} + # noinspection PyTypeChecker + return super().construct(_fields_set, **renamed_values) From 8ca175b47c89bd4b84465c04439ffec10434ce3d Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Wed, 7 Feb 2024 14:39:49 +0100 Subject: [PATCH 082/147] feat(tablemode): integrate Links manager in the table mode --- .../business/areas/renewable_management.py | 2 +- .../business/areas/st_storage_management.py | 2 +- .../business/areas/thermal_management.py | 2 +- antarest/study/business/link_management.py | 56 +++++-- .../study/business/table_mode_management.py | 38 +++-- .../rawstudy/model/filesystem/config/area.py | 51 ++----- .../filesystem/config/field_validators.py | 2 + .../rawstudy/model/filesystem/config/links.py | 142 ++++++++++++++++++ .../study_data_blueprint/test_table_mode.py | 78 +++++++++- .../areas/test_st_storage_management.py | 4 +- 10 files changed, 297 insertions(+), 80 deletions(-) create mode 100644 antarest/study/storage/rawstudy/model/filesystem/config/links.py diff --git a/antarest/study/business/areas/renewable_management.py b/antarest/study/business/areas/renewable_management.py index 17f2b33c55..c721f0bb89 100644 --- a/antarest/study/business/areas/renewable_management.py +++ b/antarest/study/business/areas/renewable_management.py @@ -146,7 +146,7 @@ def get_clusters(self, study: Study, area_id: str) -> t.Sequence[RenewableCluste return [create_renewable_output(study.version, cluster_id, cluster) for cluster_id, cluster in clusters.items()] - def get_all_renewable_props( + def get_all_renewables_props( self, study: Study, ) -> t.Mapping[str, t.Sequence[RenewableClusterOutput]]: diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py index 6ace7a760f..a9b52a0cf3 100644 --- a/antarest/study/business/areas/st_storage_management.py +++ b/antarest/study/business/areas/st_storage_management.py @@ -328,7 +328,7 @@ def get_storages( storages = [create_storage_output(study_version, storage_id, options) for storage_id, options in config.items()] return sorted(storages, key=order_by) - def get_all_storage_props( + def get_all_storages_props( self, study: Study, ) -> t.Mapping[str, t.Sequence[STStorageOutput]]: diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index 7e6eec9256..30fe6c2ce1 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -187,7 +187,7 @@ def get_clusters( study_version = study.version return [create_thermal_output(study_version, cluster_id, cluster) for cluster_id, cluster in clusters.items()] - def get_all_thermal_props( + def get_all_thermals_props( self, study: Study, ) -> t.Mapping[str, t.Sequence[ThermalClusterOutput]]: diff --git a/antarest/study/business/link_management.py b/antarest/study/business/link_management.py index 38358b052b..3667f3b20b 100644 --- a/antarest/study/business/link_management.py +++ b/antarest/study/business/link_management.py @@ -2,12 +2,16 @@ from pydantic import BaseModel -from antarest.study.business.utils import execute_or_add_commands +from antarest.core.exceptions import ConfigFileNotFound +from antarest.study.business.utils import execute_or_add_commands, camel_case_model, 
AllOptionalMetaclass
 from antarest.study.model import RawStudy
+from antarest.study.storage.rawstudy.model.filesystem.config.links import LinkProperties
 from antarest.study.storage.storage_service import StudyStorageService
 from antarest.study.storage.variantstudy.model.command.create_link import CreateLink
 from antarest.study.storage.variantstudy.model.command.remove_link import RemoveLink
 
+_ALL_LINKS_PATH = "input/links"
+
 
 class LinkUIDTO(BaseModel):
     color: str
@@ -21,19 +25,11 @@ class LinkInfoDTO(BaseModel):
     ui: t.Optional[LinkUIDTO] = None
 
 
-class GenericElement(BaseModel):
-    id: str
-    name: str
-
-
-class GenericItem(BaseModel):
-    element: GenericElement
-    item_list: t.List[GenericElement]
-
-
-class AllCLustersAndLinks(BaseModel):
-    links: t.List[GenericItem]
-    clusters: t.List[GenericItem]
+@camel_case_model
+class GetLinkDTO(LinkProperties, metaclass=AllOptionalMetaclass):
+    """
+    DTO object used to get the link information.
+    """
 
 
 class LinkManager:
@@ -81,3 +77,35 @@ def delete_link(self, study: RawStudy, area1_id: str, area2_id: str) -> None:
             command_context=self.storage_service.variant_study_service.command_factory.command_context,
         )
         execute_or_add_commands(study, file_study, [command], self.storage_service)
+
+    def get_all_links_props(self, study: RawStudy) -> t.Mapping[t.Tuple[str, str], GetLinkDTO]:
+        """
+        Retrieves the properties of all links in the study.
+
+        Args:
+            study: The raw study object.
+        Returns:
+            A mapping of link IDs `(area1_id, area2_id)` to link properties.
+        Raises:
+            ConfigFileNotFound: if a configuration file is not found.
+        """
+        file_study = self.storage_service.get_storage(study).get_raw(study)
+
+        # Get the link information from the `input/links/{area1}/properties.ini` file.
+        path = _ALL_LINKS_PATH
+        try:
+            links_cfg = file_study.tree.get(path.split("/"), depth=5)
+        except KeyError:
+            raise ConfigFileNotFound(path) from None
+
+        # links_cfg contains a dictionary where the keys are the area IDs,
+        # and the values hold a `properties` section whose entries can be converted to `LinkProperties`.
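+        # A typical entry looks like:
+        #   {"area1": {"properties": {"area2": {"hurdles-cost": "false", "colorr": "80", ...}}}}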
+ links_by_ids = {} + for area1_id, entries in links_cfg.items(): + property_map = entries.get("properties") or {} + for area2_id, properties_cfg in property_map.items(): + area1_id, area2_id = sorted([area1_id, area2_id]) + properties = LinkProperties.parse_obj(properties_cfg) + links_by_ids[(area1_id, area2_id)] = GetLinkDTO.parse_obj(properties.dict(by_alias=False)) + + return links_by_ids diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index 905b630786..1c95aab4de 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -15,6 +15,7 @@ from antarest.study.model import RawStudy from antarest.study.storage.rawstudy.model.filesystem.config.area import AdequacyPatchMode from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.links import TransmissionCapacity, AssetType from antarest.study.storage.rawstudy.model.filesystem.config.thermal import LawOption, LocalTSGenerationBehavior from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.model.command.icommand import ICommand @@ -41,20 +42,6 @@ class TableTemplateType(EnumIgnoreCase): BINDING_CONSTRAINT = "binding constraint" -class AssetType(EnumIgnoreCase): - AC = "ac" - DC = "dc" - GAZ = "gaz" - VIRT = "virt" - OTHER = "other" - - -class TransmissionCapacity(EnumIgnoreCase): - INFINITE = "infinite" - IGNORE = "ignore" - ENABLED = "enabled" - - class BindingConstraintOperator(EnumIgnoreCase): LESS = "less" GREATER = "greater" @@ -693,23 +680,26 @@ def get_table_data( areas_map = self._area_manager.get_all_area_props(study) data = {area_id: area.dict(by_alias=True) for area_id, area in areas_map.items()} elif table_type == TableTemplateType.LINK: - pass + links_map = self._link_manager.get_all_links_props(study) + data = { + f"{area1_id} / {area2_id}": link.dict(by_alias=True) for (area1_id, area2_id), link in links_map.items() + } elif table_type == TableTemplateType.THERMAL_CLUSTER: - clusters_map = self._thermal_manager.get_all_thermal_props(study) + clusters_map = self._thermal_manager.get_all_thermals_props(study) data = { f"{area_id} / {cluster.id}": cluster.dict(by_alias=True) for area_id, clusters in clusters_map.items() for cluster in clusters } elif table_type == TableTemplateType.RENEWABLE_CLUSTER: - clusters_map = self._renewable_manager.get_all_renewable_props(study) + clusters_map = self._renewable_manager.get_all_renewables_props(study) data = { f"{area_id} / {cluster.id}": cluster.dict(by_alias=True) for area_id, clusters in clusters_map.items() for cluster in clusters } elif table_type == TableTemplateType.ST_STORAGE: - storage_map = self._st_storage_manager.get_all_storage_props(study) + storage_map = self._st_storage_manager.get_all_storages_props(study) data = { f"{area_id} / {storage.id}": storage.dict(by_alias=True) for area_id, storages in storage_map.items() @@ -721,12 +711,18 @@ def get_table_data( df = pd.DataFrame.from_dict(data, orient="index") if columns: # Create a new dataframe with the listed columns. - # If a column does not exist in the DataFrame, it is created with empty values, - # because NaN (or `None`) is not JSON-serializable. + # If a column does not exist in the DataFrame, it is created with empty values. 
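+            # Rebuilding the DataFrame with an explicit `columns` list keeps only the requested
+            # columns and fills the missing ones with NaN (converted to `None` further below).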
df = pd.DataFrame(df, columns=columns) - df = df.where(pd.notna(df), other="") + df = df.where(pd.notna(df), other=None) obj = df.to_dict(orient="index") + + # Convert NaN to `None` because it is not JSON-serializable + for row in obj.values(): + for key, value in row.items(): + if pd.isna(value): + row[key] = None + return obj file_study = self.storage_service.get_storage(study).get_raw(study) diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/area.py b/antarest/study/storage/rawstudy/model/filesystem/config/area.py index acce122201..2b74b791fc 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/area.py @@ -1,13 +1,17 @@ """ Object model used to read and update area configuration. """ -import re import typing as t import typing_extensions as te from pydantic import Field, root_validator, validator from antarest.study.business.enum_ignore_case import EnumIgnoreCase +from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import ( + validate_filtering, + validate_colors, + validate_color_rgb, +) from antarest.study.storage.rawstudy.model.filesystem.config.ini_properties import IniProperties @@ -73,13 +77,12 @@ class FilteringSection(IniProperties): filter_synthesis: str = Field("hourly, daily, weekly, monthly, annual", alias="filter-synthesis") filter_year_by_year: str = Field("hourly, daily, weekly, monthly, annual", alias="filter-year-by-year") - @validator("filter_synthesis", "filter_year_by_year", pre=True) - def _validate_filtering(cls, v: t.Any) -> str: - if isinstance(v, str): - values = list(set(re.findall(r"hourly|daily|weekly|monthly|annual", v.lower()))) - values.sort(key=lambda x: ["hourly", "daily", "weekly", "monthly", "annual"].index(x)) - return ", ".join(values) - raise TypeError(f"Invalid type for filtering: {type(v)}") + _validate_filtering = validator( + "filter_synthesis", + "filter_year_by_year", + pre=True, + allow_reuse=True, + )(validate_filtering) # noinspection SpellCheckingInspection class ModalOptimizationSection(IniProperties): @@ -158,42 +161,18 @@ class AreaUI(IniProperties): """ x: int = Field(0, description="x coordinate of the area in the map") - y: int = Field(0, description="x coordinate of the area in the map") + y: int = Field(0, description="y coordinate of the area in the map") color_rgb: t.Tuple[int, int, int] = Field( (230, 108, 44), alias="colorRgb", description="color of the area in the map", ) + _validate_color_rgb = validator("color_rgb", pre=True, allow_reuse=True)(validate_color_rgb) + @root_validator(pre=True) def _validate_colors(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]: - # Parse the `[ui]` section (if any) - color_r = values.pop("color_r", None) - color_g = values.pop("color_g", None) - color_b = values.pop("color_b", None) - if color_r is not None and color_g is not None and color_b is not None: - values["color_rgb"] = color_r, color_g, color_b - return values - - @validator("color_rgb", pre=True) - def _validate_color_rgb(cls, v: t.Any) -> t.Tuple[int, int, int]: - if isinstance(v, str): - if v.startswith("#"): - r = int(v[1:3], 16) - g = int(v[3:5], 16) - b = int(v[5:7], 16) - elif v.startswith("rgb("): - r, g, b = [int(c) for c in v[4:-1].split(",")] - else: - r, g, b = [int(c) for c in v.split(",")] - return r, g, b - - elif isinstance(v, (list, tuple)): - r, g, b = v - return r, g, b - - else: - raise TypeError(f"Invalid type for 'color_rgb': {type(v)}") + return 
validate_colors(values) def to_config(self) -> t.Mapping[str, t.Any]: """ diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/field_validators.py b/antarest/study/storage/rawstudy/model/filesystem/config/field_validators.py index 74f93f5c46..f8044d786c 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/field_validators.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/field_validators.py @@ -1,6 +1,8 @@ +import re import typing as t _ALL_FILTERING = ["hourly", "daily", "weekly", "monthly", "annual"] +_find_all_filtering = re.compile("|".join(_ALL_FILTERING)).findall def extract_filtering(v: t.Any) -> t.Sequence[str]: diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/links.py b/antarest/study/storage/rawstudy/model/filesystem/config/links.py new file mode 100644 index 0000000000..d84d795c22 --- /dev/null +++ b/antarest/study/storage/rawstudy/model/filesystem/config/links.py @@ -0,0 +1,142 @@ +""" +Object model used to read and update link configuration. +""" +import typing as t + +from pydantic import validator, Field, root_validator + +from antarest.study.business.enum_ignore_case import EnumIgnoreCase +from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import ( + validate_filtering, + validate_colors, + validate_color_rgb, +) +from antarest.study.storage.rawstudy.model.filesystem.config.ini_properties import IniProperties + + +# noinspection SpellCheckingInspection +class AssetType(EnumIgnoreCase): + """ + Enum representing the type of asset for a link between two areas. + + Attributes: + AC: Represents an Alternating Current link. This is the most common type of electricity transmission. + DC: Represents a Direct Current link. This is typically used for long-distance transmission. + GAZ: Represents a gas link. This is used when the link is related to gas transmission. + VIRT: Represents a virtual link. This is used when the link doesn't physically exist + but is used for modeling purposes. + OTHER: Represents any other type of link that doesn't fall into the above categories. + """ + + AC = "ac" + DC = "dc" + GAZ = "gaz" + VIRT = "virt" + OTHER = "other" + + +class TransmissionCapacity(EnumIgnoreCase): + """ + Enum representing the transmission capacity of a link. + + Attributes: + INFINITE: Represents a link with infinite transmission capacity. + This means there are no limits on the amount of electricity that can be transmitted. + IGNORE: Represents a link where the transmission capacity is ignored. + This means the capacity is not considered during simulations. + ENABLED: Represents a link with a specific transmission capacity. + This means the capacity is considered in the model and has a certain limit. + """ + + INFINITE = "infinite" + IGNORE = "ignore" + ENABLED = "enabled" + + +class LinkProperties(IniProperties): + """ + Configuration read from a section in the `input/links//properties.ini` file. + + Usage: + + >>> from antarest.study.storage.rawstudy.model.filesystem.config.links import LinkProperties + >>> from pprint import pprint + + Create and validate a new `LinkProperties` object from a dictionary read from a configuration file. + + >>> obj = { + ... "hurdles-cost": "false", + ... "loop-flow": "false", + ... "use-phase-shifter": "false", + ... "transmission-capacities": "infinite", + ... "asset-type": "ac", + ... "link-style": "plain", + ... "link-width": "1", + ... "colorr": "80", + ... "colorg": "192", + ... "colorb": "255", + ... "display-comments": "true", + ... 
"filter-synthesis": "hourly, daily, weekly, monthly, annual", + ... "filter-year-by-year": "hourly, daily, weekly, monthly, annual", + ... } + + >>> opt = LinkProperties.parse_obj(obj) + + >>> pprint(opt.dict(by_alias=True), width=80) + {'asset-type': , + 'colorRgb': (80, 192, 255), + 'display-comments': True, + 'filter-synthesis': 'hourly, daily, weekly, monthly, annual', + 'filter-year-by-year': 'hourly, daily, weekly, monthly, annual', + 'hurdles-cost': False, + 'link-style': 'plain', + 'link-width': 1, + 'loop-flow': False, + 'transmission-capacities': , + 'use-phase-shifter': False} + """ + + hurdles_cost: bool = Field(default=False, alias="hurdles-cost") + loop_flow: bool = Field(default=False, alias="loop-flow") + use_phase_shifter: bool = Field(default=False, alias="use-phase-shifter") + transmission_capacities: TransmissionCapacity = Field( + default=TransmissionCapacity.ENABLED, alias="transmission-capacities" + ) + asset_type: AssetType = Field(default=AssetType.AC, alias="asset-type") + link_style: str = Field(default="plain", alias="link-style") + link_width: int = Field(default=1, alias="link-width") + display_comments: bool = Field(default=True, alias="display-comments") + filter_synthesis: str = Field(default="hourly, daily, weekly, monthly, annual", alias="filter-synthesis") + filter_year_by_year: str = Field(default="hourly, daily, weekly, monthly, annual", alias="filter-year-by-year") + color_rgb: t.Tuple[int, int, int] = Field( + (112, 112, 112), + alias="colorRgb", + description="color of the link in the map", + ) + + _validate_filtering = validator( + "filter_synthesis", + "filter_year_by_year", + pre=True, + allow_reuse=True, + )(validate_filtering) + + _validate_color_rgb = validator("color_rgb", pre=True, allow_reuse=True)(validate_color_rgb) + + @root_validator(pre=True) + def _validate_colors(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]: + return validate_colors(values) + + # noinspection SpellCheckingInspection + def to_config(self) -> t.Mapping[str, t.Any]: + """ + Convert the object to a dictionary for writing to a configuration file. 
+ """ + obj = dict(super().to_config()) + color_rgb = obj.pop("color_rgb", (112, 112, 112)) + return { + "colorr": color_rgb[0], + "colorg": color_rgb[1], + "colorb": color_rgb[2], + **obj, + } diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index ca0b51954e..6a5a39c976 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -131,6 +131,76 @@ def test_lifecycle__nominal( actual = res.json() assert actual == expected + # Table Mode - Links + # ================== + + res = client.get( + f"/v1/studies/{study_id}/tablemode", + headers=user_headers, + params={ + "table_type": "link", + "columns": ",".join( + [ + "hurdlesCost", + "loopFlow", + "usePhaseShifter", + "transmissionCapacities", + "assetType", + "linkStyle", + "linkWidth", + "displayComments", + "filterSynthesis", + "filterYearByYear", + "colorRgb", + ] + ), + }, + ) + assert res.status_code == 200, res.json() + expected = { + "de / fr": { + "assetType": "ac", + "colorRgb": [112, 112, 112], + "displayComments": True, + "filterSynthesis": "", + "filterYearByYear": "hourly", + "hurdlesCost": True, + "linkStyle": "plain", + "linkWidth": 1, + "loopFlow": False, + "transmissionCapacities": "enabled", + "usePhaseShifter": False, + }, + "es / fr": { + "assetType": "ac", + "colorRgb": [112, 112, 112], + "displayComments": True, + "filterSynthesis": "", + "filterYearByYear": "hourly", + "hurdlesCost": True, + "linkStyle": "plain", + "linkWidth": 1, + "loopFlow": False, + "transmissionCapacities": "enabled", + "usePhaseShifter": False, + }, + "fr / it": { + "assetType": "ac", + "colorRgb": [112, 112, 112], + "displayComments": True, + "filterSynthesis": "", + "filterYearByYear": "hourly", + "hurdlesCost": True, + "linkStyle": "plain", + "linkWidth": 1, + "loopFlow": False, + "transmissionCapacities": "enabled", + "usePhaseShifter": False, + }, + } + actual = res.json() + assert actual == expected + # Table Mode - Thermal Clusters # ============================= @@ -343,28 +413,28 @@ def test_lifecycle__nominal( "injectionNominalCapacity": 1500, "reservoirCapacity": 1500, "withdrawalNominalCapacity": 1500, - "unknowColumn": "", + "unknowColumn": None, }, "fr / tesla": { "group": "Battery", "injectionNominalCapacity": 1200, "reservoirCapacity": 1200, "withdrawalNominalCapacity": 1200, - "unknowColumn": "", + "unknowColumn": None, }, "it / storage3": { "group": "PSP_open", "injectionNominalCapacity": 1234, "reservoirCapacity": 1357, "withdrawalNominalCapacity": 1020, - "unknowColumn": "", + "unknowColumn": None, }, "it / storage4": { "group": "PSP_open", "injectionNominalCapacity": 567, "reservoirCapacity": 500, "withdrawalNominalCapacity": 456, - "unknowColumn": "", + "unknowColumn": None, }, } actual = res.json() diff --git a/tests/study/business/areas/test_st_storage_management.py b/tests/study/business/areas/test_st_storage_management.py index 5db040716d..1e97acf0d7 100644 --- a/tests/study/business/areas/test_st_storage_management.py +++ b/tests/study/business/areas/test_st_storage_management.py @@ -131,7 +131,7 @@ def test_get_all_storages__nominal_case( manager = STStorageManager(study_storage_service) # run - all_storages = manager.get_all_storage_props(study) + all_storages = manager.get_all_storages_props(study) # Check actual = {area_id: [form.dict(by_alias=True) for form in forms] for area_id, forms in all_storages.items()} @@ -204,7 +204,7 @@ def 
test_get_all_storages__config_not_found( # run with pytest.raises(STStorageConfigNotFound, match="not found"): - manager.get_all_storage_props(study) + manager.get_all_storages_props(study) def test_get_st_storages__nominal_case( self, From 7366716226c521e58bd4f6601ec251e397c7afcc Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Thu, 15 Feb 2024 23:51:50 +0100 Subject: [PATCH 083/147] refactor(tablemode): refactor the binding constraints --- .../business/binding_constraint_management.py | 5 +- antarest/study/business/link_management.py | 2 +- .../study/business/table_mode_management.py | 14 ++---- .../business/timeseries_config_management.py | 4 +- .../rawstudy/model/filesystem/config/area.py | 4 +- .../filesystem/config/binding_constraint.py | 50 +++++++++---------- .../rawstudy/model/filesystem/config/files.py | 6 +-- .../rawstudy/model/filesystem/config/links.py | 6 +-- .../rawstudy/model/filesystem/config/model.py | 20 +++++--- .../model/filesystem/root/input/input.py | 4 +- .../output/simulation/mode/common/area.py | 2 +- .../study/storage/study_download_utils.py | 4 +- .../business/utils_binding_constraint.py | 3 +- .../variantstudy/model/command/common.py | 7 --- .../variantstudy/model/command/create_area.py | 4 +- .../command/create_binding_constraint.py | 11 ++-- .../command/create_renewables_cluster.py | 6 +-- tests/integration/test_integration.py | 8 ++- .../filesystem/config/test_config_files.py | 6 +-- .../model/command/test_create_area.py | 6 +-- .../command/test_create_renewables_cluster.py | 4 +- .../test_manage_binding_constraints.py | 6 ++- .../model/command/test_remove_area.py | 6 ++- .../model/command/test_remove_cluster.py | 6 ++- .../command/test_remove_renewables_cluster.py | 4 +- 25 files changed, 95 insertions(+), 103 deletions(-) diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py index 010e1424d0..b642a26f79 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -24,7 +24,10 @@ from antarest.study.business.all_optional_meta import camel_case_model from antarest.study.business.utils import execute_or_add_commands from antarest.study.model import Study -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.storage_service import StudyStorageService diff --git a/antarest/study/business/link_management.py b/antarest/study/business/link_management.py index 3667f3b20b..60e3ddc947 100644 --- a/antarest/study/business/link_management.py +++ b/antarest/study/business/link_management.py @@ -3,7 +3,7 @@ from pydantic import BaseModel from antarest.core.exceptions import ConfigFileNotFound -from antarest.study.business.utils import execute_or_add_commands, camel_case_model, AllOptionalMetaclass +from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands from antarest.study.model import RawStudy from antarest.study.storage.rawstudy.model.filesystem.config.links import LinkProperties from antarest.study.storage.storage_service import StudyStorageService diff --git 
a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index 1c95aab4de..8f4791aff4 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -14,8 +14,11 @@ from antarest.study.common.default_values import FilteringOptions, LinkProperties, NodalOptimization from antarest.study.model import RawStudy from antarest.study.storage.rawstudy.model.filesystem.config.area import AdequacyPatchMode -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency -from antarest.study.storage.rawstudy.model.filesystem.config.links import TransmissionCapacity, AssetType +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) +from antarest.study.storage.rawstudy.model.filesystem.config.links import AssetType, TransmissionCapacity from antarest.study.storage.rawstudy.model.filesystem.config.thermal import LawOption, LocalTSGenerationBehavior from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.model.command.icommand import ICommand @@ -42,13 +45,6 @@ class TableTemplateType(EnumIgnoreCase): BINDING_CONSTRAINT = "binding constraint" -class BindingConstraintOperator(EnumIgnoreCase): - LESS = "less" - GREATER = "greater" - BOTH = "both" - EQUAL = "equal" - - class AreaColumns(FormFieldsBaseModel, metaclass=AllOptionalMetaclass): # Optimization - Nodal optimization non_dispatchable_power: bool = Field( diff --git a/antarest/study/business/timeseries_config_management.py b/antarest/study/business/timeseries_config_management.py index fac397b3be..418921e7f0 100644 --- a/antarest/study/business/timeseries_config_management.py +++ b/antarest/study/business/timeseries_config_management.py @@ -6,7 +6,7 @@ from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import GENERAL_DATA_PATH, FormFieldsBaseModel, execute_or_add_commands from antarest.study.model import Study -from antarest.study.storage.rawstudy.model.filesystem.config.model import ENR_MODELLING +from antarest.study.storage.rawstudy.model.filesystem.config.model import EnrModelling from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.storage_service import StudyStorageService from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig @@ -193,7 +193,7 @@ def __get_form_fields_for_type( input_ = general_data.get("input", {}) output = general_data.get("output", {}) - is_aggregated = file_study.config.enr_modelling == ENR_MODELLING.AGGREGATED.value + is_aggregated = file_study.config.enr_modelling == EnrModelling.AGGREGATED.value if ts_type == TSType.RENEWABLES and is_aggregated: return None diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/area.py b/antarest/study/storage/rawstudy/model/filesystem/config/area.py index 2b74b791fc..5fdd6fcf88 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/area.py @@ -8,9 +8,9 @@ from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import ( - validate_filtering, - validate_colors, validate_color_rgb, + validate_colors, + validate_filtering, ) from 
antarest.study.storage.rawstudy.model.filesystem.config.ini_properties import IniProperties diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py b/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py index a396ea950d..0bdf0160be 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py @@ -1,28 +1,14 @@ -import typing as t -from enum import Enum +from antarest.study.business.enum_ignore_case import EnumIgnoreCase -from pydantic import BaseModel - -class BindingConstraintFrequency(str, Enum): +class BindingConstraintFrequency(EnumIgnoreCase): """ - Frequency of binding constraint - - - HOURLY: hourly time series with 8784 lines - - DAILY: daily time series with 366 lines - - WEEKLY: weekly time series with 366 lines (same as daily) - - Usage example: - - >>> bcf = BindingConstraintFrequency.HOURLY - >>> bcf == "hourly" - True - >>> bcf = BindingConstraintFrequency.DAILY - >>> "daily" == bcf - True - >>> bcf = BindingConstraintFrequency.WEEKLY - >>> bcf != "daily" - True + Frequency of a binding constraint. + + Attributes: + HOURLY: hourly time series with 8784 lines + DAILY: daily time series with 366 lines + WEEKLY: weekly time series with 366 lines (same as daily) """ HOURLY = "hourly" @@ -30,8 +16,18 @@ class BindingConstraintFrequency(str, Enum): WEEKLY = "weekly" -class BindingConstraintDTO(BaseModel): - id: str - areas: t.Set[str] - clusters: t.Set[str] - time_step: BindingConstraintFrequency +class BindingConstraintOperator(EnumIgnoreCase): + """ + Operator of a binding constraint. + + Attributes: + LESS: less than or equal to + GREATER: greater than or equal to + BOTH: both LESS and GREATER + EQUAL: equal to + """ + + LESS = "less" + GREATER = "greater" + BOTH = "both" + EQUAL = "equal" diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/files.py b/antarest/study/storage/rawstudy/model/filesystem/config/files.py index cafc901644..6f49c9f6fa 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/files.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/files.py @@ -10,10 +10,7 @@ from antarest.core.model import JSON from antarest.study.storage.rawstudy.ini_reader import IniReader -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( - BindingConstraintDTO, - BindingConstraintFrequency, -) +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency from antarest.study.storage.rawstudy.model.filesystem.config.exceptions import ( SimulationParsingError, XpansionParsingError, @@ -21,6 +18,7 @@ from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import extract_filtering from antarest.study.storage.rawstudy.model.filesystem.config.model import ( Area, + BindingConstraintDTO, DistrictSet, FileStudyTreeConfig, Link, diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/links.py b/antarest/study/storage/rawstudy/model/filesystem/config/links.py index d84d795c22..c10b91bec0 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/links.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/links.py @@ -3,13 +3,13 @@ """ import typing as t -from pydantic import validator, Field, root_validator +from pydantic import Field, root_validator, validator from antarest.study.business.enum_ignore_case import EnumIgnoreCase from 
antarest.study.storage.rawstudy.model.filesystem.config.field_validators import ( - validate_filtering, - validate_colors, validate_color_rgb, + validate_colors, + validate_filtering, ) from antarest.study.storage.rawstudy.model.filesystem.config.ini_properties import IniProperties diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/model.py b/antarest/study/storage/rawstudy/model/filesystem/config/model.py index 18e9702571..2ec15e6915 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/model.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/model.py @@ -1,21 +1,20 @@ import re import typing as t -from enum import Enum from pathlib import Path +from pydantic import BaseModel from pydantic import Field, root_validator -from pydantic.main import BaseModel from antarest.core.utils.utils import DTO - -from .binding_constraint import BindingConstraintDTO +from antarest.study.business.enum_ignore_case import EnumIgnoreCase +from .binding_constraint import BindingConstraintFrequency from .field_validators import extract_filtering from .renewable import RenewableConfigType from .st_storage import STStorageConfigType from .thermal import ThermalConfigType -class ENR_MODELLING(Enum): +class EnrModelling(EnumIgnoreCase): AGGREGATED = "aggregated" CLUSTERS = "clusters" @@ -101,6 +100,13 @@ def get_file(self) -> str: return f"{self.date}{modes[self.mode]}{dash}{self.name}" +class BindingConstraintDTO(BaseModel): + id: str + areas: t.Set[str] + clusters: t.Set[str] + time_step: BindingConstraintFrequency + + class FileStudyTreeConfig(DTO): """ Root object to handle all study parameters which impact tree structure @@ -119,7 +125,7 @@ def __init__( bindings: t.Optional[t.List[BindingConstraintDTO]] = None, store_new_set: bool = False, archive_input_series: t.Optional[t.List[str]] = None, - enr_modelling: str = ENR_MODELLING.AGGREGATED.value, + enr_modelling: str = EnrModelling.AGGREGATED.value, cache: t.Optional[t.Dict[str, t.List[str]]] = None, zip_path: t.Optional[Path] = None, ): @@ -254,7 +260,7 @@ class FileStudyTreeConfigDTO(BaseModel): bindings: t.List[BindingConstraintDTO] = list() store_new_set: bool = False archive_input_series: t.List[str] = list() - enr_modelling: str = ENR_MODELLING.AGGREGATED.value + enr_modelling: str = EnrModelling.AGGREGATED.value zip_path: t.Optional[Path] = None @staticmethod diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py index 88b58c5369..4e26ff0c9c 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py @@ -1,4 +1,4 @@ -from antarest.study.storage.rawstudy.model.filesystem.config.model import ENR_MODELLING +from antarest.study.storage.rawstudy.model.filesystem.config.model import EnrModelling from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode from antarest.study.storage.rawstudy.model.filesystem.inode import TREE from antarest.study.storage.rawstudy.model.filesystem.root.input.areas.areas import InputAreas @@ -37,7 +37,7 @@ def build(self) -> TREE: "wind": InputWind(self.context, self.config.next_file("wind")), } - if self.config.enr_modelling == ENR_MODELLING.CLUSTERS.value: + if self.config.enr_modelling == EnrModelling.CLUSTERS.value: children["renewables"] = ClusteredRenewables(self.context, self.config.next_file("renewables")) if self.config.version >= 860: children["st-storage"] 
= InputSTStorage(self.context, self.config.next_file("st-storage")) diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py index d2b9541a22..dc5726554d 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py @@ -53,7 +53,7 @@ def build(self) -> TREE: self.area, ) - # has_enr_clusters = self.config.enr_modelling == ENR_MODELLING.CLUSTERS.value and + # has_enr_clusters = self.config.enr_modelling == EnrModelling.CLUSTERS.value and # len(self.config.get_renewable_ids(self.area)) > 0 # todo get the config related to this output (now this may fail if input has changed since the launch) has_enr_clusters = True diff --git a/antarest/study/storage/study_download_utils.py b/antarest/study/storage/study_download_utils.py index 9cc25d5586..c89d60b380 100644 --- a/antarest/study/storage/study_download_utils.py +++ b/antarest/study/storage/study_download_utils.py @@ -22,7 +22,7 @@ StudyDownloadType, TimeSerie, ) -from antarest.study.storage.rawstudy.model.filesystem.config.model import ENR_MODELLING, Area, FileStudyTreeConfig +from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, EnrModelling, FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError, FilterError, FolderNode from antarest.study.storage.rawstudy.model.filesystem.inode import INode @@ -98,7 +98,7 @@ def level_output_filter( data: StudyDownloadDTO, ) -> None: cluster_details = [f"details-{data.level.value}"] - if study.config.enr_modelling == ENR_MODELLING.CLUSTERS.value: + if study.config.enr_modelling == EnrModelling.CLUSTERS.value: cluster_details += [f"details-res-{data.level.value}"] files_matcher = ( diff --git a/antarest/study/storage/variantstudy/business/utils_binding_constraint.py b/antarest/study/storage/variantstudy/business/utils_binding_constraint.py index 4db7c525d8..552afed5e0 100644 --- a/antarest/study/storage/variantstudy/business/utils_binding_constraint.py +++ b/antarest/study/storage/variantstudy/business/utils_binding_constraint.py @@ -1,10 +1,9 @@ import typing as t from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( - BindingConstraintDTO, BindingConstraintFrequency, ) -from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig +from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig, BindingConstraintDTO def parse_bindings_coeffs_and_save_into_config( diff --git a/antarest/study/storage/variantstudy/model/command/common.py b/antarest/study/storage/variantstudy/model/command/common.py index a6ac905fd9..40ec8629cf 100644 --- a/antarest/study/storage/variantstudy/model/command/common.py +++ b/antarest/study/storage/variantstudy/model/command/common.py @@ -8,13 +8,6 @@ class CommandOutput: message: str = "" -class BindingConstraintOperator(Enum): - BOTH = "both" - EQUAL = "equal" - GREATER = "greater" - LESS = "less" - - class CoeffType(Enum): THERMAL = "thermal" LINK = "link" diff --git a/antarest/study/storage/variantstudy/model/command/create_area.py b/antarest/study/storage/variantstudy/model/command/create_area.py index d2114c254e..f956ef298c 100644 --- 
a/antarest/study/storage/variantstudy/model/command/create_area.py +++ b/antarest/study/storage/variantstudy/model/command/create_area.py @@ -5,8 +5,8 @@ from antarest.core.model import JSON from antarest.study.common.default_values import FilteringOptions, NodalOptimization from antarest.study.storage.rawstudy.model.filesystem.config.model import ( - ENR_MODELLING, Area, + EnrModelling, FileStudyTreeConfig, transform_name_to_id, ) @@ -238,7 +238,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: f"waterValues_{area_id}" ] = self.command_context.generator_matrix_constants.get_null_matrix() - if version >= 810 and study_data.config.enr_modelling == ENR_MODELLING.CLUSTERS.value: + if version >= 810 and study_data.config.enr_modelling == EnrModelling.CLUSTERS.value: new_area_data["input"]["renewables"] = { "clusters": {area_id: {"list": {}}}, } diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py index 70a1c1f627..d66df2c970 100644 --- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py @@ -7,7 +7,10 @@ from antarest.matrixstore.model import MatrixData from antarest.study.business.all_optional_meta import AllOptionalMetaclass -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.business.matrix_constants_generator import GeneratorMatrixConstants @@ -15,11 +18,7 @@ from antarest.study.storage.variantstudy.business.utils_binding_constraint import ( parse_bindings_coeffs_and_save_into_config, ) -from antarest.study.storage.variantstudy.model.command.common import ( - BindingConstraintOperator, - CommandName, - CommandOutput, -) +from antarest.study.storage.variantstudy.model.command.common import CommandName, CommandOutput from antarest.study.storage.variantstudy.model.command.icommand import MATCH_SIGNATURE_SEPARATOR, ICommand from antarest.study.storage.variantstudy.model.model import CommandDTO diff --git a/antarest/study/storage/variantstudy/model/command/create_renewables_cluster.py b/antarest/study/storage/variantstudy/model/command/create_renewables_cluster.py index ab61d8f710..3e5ad8e213 100644 --- a/antarest/study/storage/variantstudy/model/command/create_renewables_cluster.py +++ b/antarest/study/storage/variantstudy/model/command/create_renewables_cluster.py @@ -4,8 +4,8 @@ from antarest.core.model import JSON from antarest.study.storage.rawstudy.model.filesystem.config.model import ( - ENR_MODELLING, Area, + EnrModelling, FileStudyTreeConfig, transform_name_to_id, ) @@ -42,7 +42,7 @@ def validate_cluster_name(cls, val: str) -> str: return val def _apply_config(self, study_data: FileStudyTreeConfig) -> t.Tuple[CommandOutput, t.Dict[str, t.Any]]: - if study_data.enr_modelling != ENR_MODELLING.CLUSTERS.value: + if study_data.enr_modelling != EnrModelling.CLUSTERS.value: # Since version 8.1 of the solver, we can use renewable clusters # instead of "Load", "Wind" and "Solar" objects for modelling. 
# When the "renewable-generation-modelling" parameter is set to "aggregated", @@ -50,7 +50,7 @@ def _apply_config(self, study_data: FileStudyTreeConfig) -> t.Tuple[CommandOutpu # To use renewable clusters, the parameter must therefore be set to "clusters". message = ( f"Parameter 'renewable-generation-modelling'" - f" must be set to '{ENR_MODELLING.CLUSTERS.value}'" + f" must be set to '{EnrModelling.CLUSTERS.value}'" f" instead of '{study_data.enr_modelling}'" ) return CommandOutput(status=False, message=message), {} diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index 3b77682658..133f7a8f29 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -17,13 +17,11 @@ TransmissionCapacities, UnfeasibleProblemBehavior, ) -from antarest.study.business.table_mode_management import ( - AssetType, +from antarest.study.business.table_mode_management import AssetType, TableTemplateType, TransmissionCapacity +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, BindingConstraintOperator, - TableTemplateType, - TransmissionCapacity, ) -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency from antarest.study.storage.rawstudy.model.filesystem.config.renewable import RenewableClusterGroup from antarest.study.storage.rawstudy.model.filesystem.config.thermal import LawOption, LocalTSGenerationBehavior from antarest.study.storage.variantstudy.model.command.common import CommandName diff --git a/tests/storage/repository/filesystem/config/test_config_files.py b/tests/storage/repository/filesystem/config/test_config_files.py index d88c363d5f..f07ac8f3db 100644 --- a/tests/storage/repository/filesystem/config/test_config_files.py +++ b/tests/storage/repository/filesystem/config/test_config_files.py @@ -6,10 +6,7 @@ import pytest -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( - BindingConstraintDTO, - BindingConstraintFrequency, -) +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency from antarest.study.storage.rawstudy.model.filesystem.config.files import ( _parse_links_filtering, _parse_renewables, @@ -21,6 +18,7 @@ ) from antarest.study.storage.rawstudy.model.filesystem.config.model import ( Area, + BindingConstraintDTO, DistrictSet, FileStudyTreeConfig, Link, diff --git a/tests/variantstudy/model/command/test_create_area.py b/tests/variantstudy/model/command/test_create_area.py index 62e01aeba4..330067db56 100644 --- a/tests/variantstudy/model/command/test_create_area.py +++ b/tests/variantstudy/model/command/test_create_area.py @@ -3,7 +3,7 @@ import pytest -from antarest.study.storage.rawstudy.model.filesystem.config.model import ENR_MODELLING, transform_name_to_id +from antarest.study.storage.rawstudy.model.filesystem.config.model import EnrModelling, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.business.command_reverter import CommandReverter from antarest.study.storage.variantstudy.model.command.create_area import CreateArea @@ -14,7 +14,7 @@ class TestCreateArea: @pytest.mark.parametrize("version", [600, 650, 810, 830, 860]) - @pytest.mark.parametrize("enr_modelling", list(ENR_MODELLING)) + @pytest.mark.parametrize("enr_modelling", list(EnrModelling)) def test_apply( self, empty_study: FileStudy, 
@@ -132,7 +132,7 @@ def test_apply( assert (study_path / "input" / "thermal" / "clusters" / area_id / "list.ini").exists() # Renewable Clusters - if version >= 810 and empty_study.config.enr_modelling == ENR_MODELLING.CLUSTERS.value: + if version >= 810 and empty_study.config.enr_modelling == EnrModelling.CLUSTERS.value: assert (study_path / "input" / "renewables" / "clusters" / area_id).is_dir() assert (study_path / "input" / "renewables" / "clusters" / area_id / "list.ini").exists() diff --git a/tests/variantstudy/model/command/test_create_renewables_cluster.py b/tests/variantstudy/model/command/test_create_renewables_cluster.py index ecec2fd882..bb91b82258 100644 --- a/tests/variantstudy/model/command/test_create_renewables_cluster.py +++ b/tests/variantstudy/model/command/test_create_renewables_cluster.py @@ -4,7 +4,7 @@ import pytest from pydantic import ValidationError -from antarest.study.storage.rawstudy.model.filesystem.config.model import ENR_MODELLING, transform_name_to_id +from antarest.study.storage.rawstudy.model.filesystem.config.model import EnrModelling, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.business.command_reverter import CommandReverter from antarest.study.storage.variantstudy.model.command.common import CommandName @@ -39,7 +39,7 @@ def test_validate_cluster_name(self, command_context: CommandContext): CreateRenewablesCluster(area_id="fr", cluster_name="%", command_context=command_context, parameters={}) def test_apply(self, empty_study: FileStudy, command_context: CommandContext): - empty_study.config.enr_modelling = ENR_MODELLING.CLUSTERS.value + empty_study.config.enr_modelling = EnrModelling.CLUSTERS.value study_path = empty_study.config.study_path area_name = "DE" area_id = transform_name_to_id(area_name, lower=True) diff --git a/tests/variantstudy/model/command/test_manage_binding_constraints.py b/tests/variantstudy/model/command/test_manage_binding_constraints.py index 2ca4015808..f848883831 100644 --- a/tests/variantstudy/model/command/test_manage_binding_constraints.py +++ b/tests/variantstudy/model/command/test_manage_binding_constraints.py @@ -4,7 +4,10 @@ import pytest from antarest.study.storage.rawstudy.ini_reader import IniReader -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.business.command_extractor import CommandExtractor from antarest.study.storage.variantstudy.business.command_reverter import CommandReverter @@ -15,7 +18,6 @@ default_bc_hourly, default_bc_weekly_daily, ) -from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator from antarest.study.storage.variantstudy.model.command.create_area import CreateArea from antarest.study.storage.variantstudy.model.command.create_binding_constraint import CreateBindingConstraint from antarest.study.storage.variantstudy.model.command.create_cluster import CreateCluster diff --git a/tests/variantstudy/model/command/test_remove_area.py b/tests/variantstudy/model/command/test_remove_area.py index 118d45e0d8..90c19d34b9 100644 --- a/tests/variantstudy/model/command/test_remove_area.py +++ 
b/tests/variantstudy/model/command/test_remove_area.py @@ -1,9 +1,11 @@ import pytest -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator from antarest.study.storage.variantstudy.model.command.create_area import CreateArea from antarest.study.storage.variantstudy.model.command.create_binding_constraint import CreateBindingConstraint from antarest.study.storage.variantstudy.model.command.create_cluster import CreateCluster diff --git a/tests/variantstudy/model/command/test_remove_cluster.py b/tests/variantstudy/model/command/test_remove_cluster.py index faae51f5c7..f0dd04f2b1 100644 --- a/tests/variantstudy/model/command/test_remove_cluster.py +++ b/tests/variantstudy/model/command/test_remove_cluster.py @@ -2,10 +2,12 @@ import pytest from checksumdir import dirhash -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator from antarest.study.storage.variantstudy.model.command.create_area import CreateArea from antarest.study.storage.variantstudy.model.command.create_binding_constraint import CreateBindingConstraint from antarest.study.storage.variantstudy.model.command.create_cluster import CreateCluster diff --git a/tests/variantstudy/model/command/test_remove_renewables_cluster.py b/tests/variantstudy/model/command/test_remove_renewables_cluster.py index 26eaa52837..42573e8b74 100644 --- a/tests/variantstudy/model/command/test_remove_renewables_cluster.py +++ b/tests/variantstudy/model/command/test_remove_renewables_cluster.py @@ -1,6 +1,6 @@ from checksumdir import dirhash -from antarest.study.storage.rawstudy.model.filesystem.config.model import ENR_MODELLING, transform_name_to_id +from antarest.study.storage.rawstudy.model.filesystem.config.model import EnrModelling, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.model.command.create_area import CreateArea from antarest.study.storage.variantstudy.model.command.create_renewables_cluster import CreateRenewablesCluster @@ -11,7 +11,7 @@ class TestRemoveRenewablesCluster: def test_apply(self, empty_study: FileStudy, command_context: CommandContext): - empty_study.config.enr_modelling = ENR_MODELLING.CLUSTERS.value + empty_study.config.enr_modelling = EnrModelling.CLUSTERS.value empty_study.config.version = 810 area_name = "Area_name" area_id = transform_name_to_id(area_name) From 065efd57ef5dcee373d80a7cc45eae8cd71d83ce Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Fri, 16 Feb 2024 08:38:15 +0100 Subject: [PATCH 084/147] feat(tablemode): integrate binding constraints manager in the table mode 
(WIP) --- .../filesystem/config/binding_constraint.py | 465 ++++++++++++++++++ .../filesystem/matrix/date_serializer.py | 2 +- 2 files changed, 466 insertions(+), 1 deletion(-) diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py b/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py index 0bdf0160be..ad89278863 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py @@ -1,4 +1,14 @@ +""" +Object model used to read and update binding constraint configuration. +""" +import json +import typing as t + +from pydantic import Field, validator, root_validator + from antarest.study.business.enum_ignore_case import EnumIgnoreCase +from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import validate_filtering +from antarest.study.storage.rawstudy.model.filesystem.config.ini_properties import IniProperties class BindingConstraintFrequency(EnumIgnoreCase): @@ -31,3 +41,458 @@ class BindingConstraintOperator(EnumIgnoreCase): GREATER = "greater" BOTH = "both" EQUAL = "equal" + + +class AbstractTerm(IniProperties): + """ + Abstract term of a binding constraint. + + Attributes: + weight: weight of the term + offset: offset of the term + """ + + weight: float = 0.0 + offset: float = 0.0 + + def __str__(self) -> str: + """String representation used in configuration files.""" + term_id = self.bc_id + value = self.bc_value + return f"{term_id} = {value}" if term_id else value + + @property + def bc_id(self) -> str: + """Return the constraint term ID for this constraint.""" + # Method should be overridden in child class. + # It is implemented here to avoid raising an error in the debugger. + return "" + + @property + def bc_value(self) -> str: + """Return the constraint term value for this constraint.""" + return f"{self.weight}%{self.offset}" if self.offset else str(self.weight) + + +class LinkTerm(AbstractTerm): + """ + Term of a binding constraint applied to a link. + + Attributes: + weight: weight of the term + offset: offset of the term + area1_id: ID of the first area + area2_id: ID of the second area + """ + + area1_id: str = Field(alias="area1") + area2_id: str = Field(alias="area2") + + @property + def bc_id(self) -> str: + """ + Return the constraint term ID for this constraint on a link, + of the form "area1%area2". + """ + # Ensure IDs are in alphabetical order and lower case + ids = sorted((self.area1_id.lower(), self.area2_id.lower())) + return "%".join(ids) + + +class ClusterTerm(AbstractTerm): + """ + Term of a binding constraint applied to a thermal cluster. + + Attributes: + weight: weight of the term + offset: offset of the term + area_id: ID of the area + cluster_id: ID of the cluster + """ + + area_id: str = Field(alias="area") + cluster_id: str = Field(alias="cluster") + + @property + def bc_id(self) -> str: + """ + Return the constraint term ID for this constraint on thermal cluster, + of the form "area.cluster". + """ + # Ensure IDs are in lower case + ids = [self.area_id.lower(), self.cluster_id.lower()] + return ".".join(ids) + + +BindingConstraintTerm = t.Union[LinkTerm, ClusterTerm] +""" +This type represents the list of possible term types for a binding constraint. +This union can be extended with new term types in the future. 
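
A minimal sketch, assuming the two classes above, of how each variant renders its
term ID and value for the INI file:

    >>> LinkTerm(weight=2.0, area1_id="FR", area2_id="DE").bc_id
    'de%fr'
    >>> ClusterTerm(weight=6.5, offset=3.0, area_id="AT", cluster_id="AT_DSR 0").bc_value
    '6.5%3.0'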
+""" + + +def build_term_from_config(term_id: str, value: t.Union[str, int, float]) -> BindingConstraintTerm: + """ + Create a term from a string extracted from the configuration file. + """ + # Extract the weight and offset from the value + if isinstance(value, (int, float)): + weight, offset = float(value), 0.0 + else: + weight, offset = map(float, value.split("%")) if "%" in value else (float(value), 0.0) + + # Parse the term ID + if "%" in term_id: + # - Link: "{area1_id}%{area2_id} = {weight}" + # - Link with offset: "{area1_id}%{area2_id} = {weight}%{offset}" + area1_id, area2_id = term_id.split("%") + return LinkTerm(weight=weight, offset=offset, area1_id=area1_id, area2_id=area2_id) + + elif "." in term_id: + # - Cluster: "{area_id}.{cluster_id} = {weight}" + # - Cluster with offset: "{area_id}.{cluster_id} = {weight}%{offset}" + area_id, cluster_id = term_id.split(".") + return ClusterTerm(weight=weight, offset=offset, area_id=area_id, cluster_id=cluster_id) + + else: + raise ValueError(f"Invalid term ID: {term_id}") + + +def build_term_from_obj(obj: t.Mapping[str, t.Any]) -> BindingConstraintTerm: + """ + Create a term from a dictionary extracted from another object. + """ + for cls in BindingConstraintTerm.__args__: # type: ignore + try: + return t.cast(BindingConstraintTerm, cls.parse_obj(obj)) + except ValueError: + pass + raise ValueError(f"Invalid term object: {obj!r}") + + +def _generate_bc_id(name: t.Optional[str]) -> t.Optional[str]: + """ + Generate a binding constraint ID from the name. + Return ``None`` if the name is not set or invalid. + """ + # If the name is not set, return None + if not name: + return None + + # Lazy import to avoid circular import + from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id + + bc_id = transform_name_to_id(name, lower=True) + return bc_id or None # Ensure None if empty string + + +# noinspection SpellCheckingInspection +class BindingConstraintProperties(IniProperties): + """ + Configuration read from the `input/bindingconstraints/bindingconstraints.ini` file. + + This file contains a section for each binding constraint. + Section names correspond to a 0-based index in the list of constraints. + + But, since each binding constraint has a unique ID, we use a mapping of IDs to sections. + + >>> from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintProperties + >>> from pprint import pprint + + Create and validate a new BindingConstraintProperties from a dictionary read from a configuration file. + + >>> obj = { + ... "0": { + ... "name": "DSR_AT_stock", + ... "id": "dsr_at_stock", + ... "enabled": True, + ... "type": "daily", + ... "operator": "less", + ... "at.at_dsr 0": 6.5, + ... }, + ... "1": { + ... "name": "DSR_BE_stock", + ... "enabled": False, + ... "type": "daily", + ... "operator": "greater", + ... "be.be_dsr 0": 8.3, + ... }, + ... 
} + + >>> bc = BindingConstraintProperties.parse_obj(obj) + >>> constraints = sorted(bc.constraints.values(), key=lambda s: s.id) + >>> pprint([s.dict(by_alias=True) for s in constraints]) + [{'comments': '', + 'enabled': True, + 'filter-synthesis': 'hourly, daily, weekly, monthly, annual', + 'filter-year-by-year': 'hourly, daily, weekly, monthly, annual', + 'id': 'dsr_at_stock', + 'name': 'DSR_AT_stock', + 'operator': , + 'terms': {'at.at_dsr 0': {'area': 'at', + 'cluster': 'at_dsr 0', + 'offset': 0.0, + 'weight': 6.5}}, + 'type': }, + {'comments': '', + 'enabled': False, + 'filter-synthesis': 'hourly, daily, weekly, monthly, annual', + 'filter-year-by-year': 'hourly, daily, weekly, monthly, annual', + 'id': 'dsr_be_stock', + 'name': 'DSR_BE_stock', + 'operator': , + 'terms': {'be.be_dsr 0': {'area': 'be', + 'cluster': 'be_dsr 0', + 'offset': 0.0, + 'weight': 8.3}}, + 'type': }] + """ + + class BindingConstraintSection(IniProperties): + """ + Configuration read from a section in the `input/bindingconstraints/bindingconstraints.ini` file. + + >>> from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintProperties + >>> from pprint import pprint + + Create and validate a new BindingConstraintSection from a dictionary read from a configuration file. + + >>> obj = { + ... "name": "FB001", + ... "id": "fb001", + ... "enabled": True, + ... "type": "hourly", + ... "operator": "less", + ... "filter-synthesis": "hourly, annual", + ... "filter-year-by-year": "", + ... "at.cl1": 1, + ... "de.cl2": "-88.77%7", + ... "at%de": -0.06, + ... "at%es": "8.5%0.5", + ... } + + >>> bc = BindingConstraintProperties.BindingConstraintSection.parse_obj(obj) + >>> pprint(bc.dict(by_alias=True)) + {'comments': '', + 'enabled': True, + 'filter-synthesis': 'hourly, annual', + 'filter-year-by-year': '', + 'id': 'fb001', + 'name': 'FB001', + 'operator': , + 'terms': {'at%de': {'area1': 'at', + 'area2': 'de', + 'offset': 0.0, + 'weight': -0.06}, + 'at%es': {'area1': 'at', + 'area2': 'es', + 'offset': 0.5, + 'weight': 8.5}, + 'at.cl1': {'area': 'at', + 'cluster': 'cl1', + 'offset': 0.0, + 'weight': 1.0}, + 'de.cl2': {'area': 'de', + 'cluster': 'cl2', + 'offset': 7.0, + 'weight': -88.77}}, + 'type': } + + We can construct a BindingConstraintSection from a dictionary. + + >>> bc2 = BindingConstraintProperties.BindingConstraintSection.parse_obj(bc.dict()) + >>> bc2 == bc + True + + Convert the BindingConstraintSection to a dictionary for writing to a configuration file. 
+ + >>> pprint(bc2.to_config()) + {'at%de': '-0.06', + 'at%es': '8.5%0.5', + 'at.cl1': '1.0', + 'comments': '', + 'de.cl2': '-88.77%7.0', + 'enabled': True, + 'filter-synthesis': 'hourly, annual', + 'filter-year-by-year': '', + 'id': 'fb001', + 'name': 'FB001', + 'operator': 'less', + 'type': 'hourly'} + """ + + id: str + name: str + enabled: bool = True + type: BindingConstraintFrequency = BindingConstraintFrequency.HOURLY + operator: BindingConstraintOperator = BindingConstraintOperator.EQUAL + comments: str = "" + filter_synthesis: str = Field(default="hourly, daily, weekly, monthly, annual", alias="filter-synthesis") + filter_year_by_year: str = Field(default="hourly, daily, weekly, monthly, annual", alias="filter-year-by-year") + + terms: t.MutableMapping[str, BindingConstraintTerm] = Field(default_factory=dict) + + @root_validator(pre=True) + def _populate_section(cls, values: t.MutableMapping[str, t.Any]) -> t.MutableMapping[str, t.Any]: + """ + Parse the section properties and terms from the configuration file. + """ + + # Extract known properties to leave only terms + new_values = { + "id": values.pop("id", None), + "name": values.pop("name", None), + "enabled": values.pop("enabled", None), + "type": values.pop("type", None), + "operator": values.pop("operator", None), + "comments": values.pop("comments", None), + "filter-synthesis": values.pop("filter_synthesis", None), + "filter-year-by-year": values.pop("filter_year_by_year", None), + } + + if new_values["id"] is None: + new_values["id"] = _generate_bc_id(new_values["name"]) + if new_values["filter-synthesis"] is None: + new_values["filter-synthesis"] = values.pop("filter-synthesis", None) + if new_values["filter-year-by-year"] is None: + new_values["filter-year-by-year"] = values.pop("filter-year-by-year", None) + + # Collect terms + new_values["terms"] = terms = {} + if "terms" in values: + for value in values.pop("terms").values(): + obj = value if isinstance(value, dict) else value.dict() + term = build_term_from_obj(obj) + terms[term.bc_id] = term + else: + for term_id, value in values.items(): + term = build_term_from_config(term_id, value) + terms[term.bc_id] = term + + # Drop `None` values so that we can use the default values, but keep "" values + new_values = {k: v for k, v in new_values.items() if v is not None} + + return new_values + + _validate_filtering = validator( + "filter_synthesis", + "filter_year_by_year", + pre=True, + allow_reuse=True, + )(validate_filtering) + + def to_config(self) -> t.Mapping[str, t.Any]: + config_values = { + "id": self.id, + "name": self.name, + "enabled": self.enabled, + "type": self.type, + "operator": self.operator, + "comments": self.comments, + "filter-synthesis": self.filter_synthesis, + "filter-year-by-year": self.filter_year_by_year, + } + + for term_id, term in sorted(self.terms.items()): + config_values[term_id] = term.bc_value + + # Convert to a dictionary for writing to a configuration file + config_values = {k: json.loads(json.dumps(v)) for k, v in config_values.items()} + + return config_values + + def insert_term(self, term: BindingConstraintTerm) -> None: + """ + Insert a new term into the section. + """ + term_id = term.bc_id + if term_id in self.terms: + raise ValueError(f"Term '{term_id}' already exists in the binding constraint '{self.id}'.") + self.terms[term_id] = term + + def remove_term(self, term_id: str) -> None: + """ + Remove a term from the section. 
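
        A minimal usage sketch, assuming the `bc` section built in the
        `BindingConstraintSection` doctest above:

            >>> term = build_term_from_config("fr%it", "2.5%3")
            >>> bc.insert_term(term)
            >>> bc.get_term("fr%it").weight
            2.5
            >>> bc.remove_term("fr%it")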
+ """ + if term_id not in self.terms: + raise ValueError(f"Term '{term_id}' does not exist in the binding constraint '{self.id}'.") + del self.terms[term_id] + + def update_term(self, term: BindingConstraintTerm) -> None: + """ + Update an existing term in the section. + """ + term_id = term.bc_id + if term_id not in self.terms: + raise ValueError(f"Term '{term_id}' does not exist in the binding constraint '{self.id}'.") + self.terms[term_id] = term + + def get_term(self, term_id: str) -> BindingConstraintTerm: + """ + Get a term from the section. + """ + if term_id not in self.terms: + raise ValueError(f"Term '{term_id}' does not exist in the binding constraint '{self.id}'.") + return self.terms[term_id] + + constraints: t.MutableMapping[str, BindingConstraintSection] = Field(default_factory=dict) + + @root_validator(pre=True) + def _populate_constraints(cls, values: t.MutableMapping[str, t.Any]) -> t.MutableMapping[str, t.Any]: + """ + Parse the sections from the configuration file. + """ + parse_obj = BindingConstraintProperties.BindingConstraintSection.parse_obj + constraints = {} + + if "constraints" in values: + # Case where the dictionary comes from another BindingConstraintProperties object + for section_value in values["constraints"].values(): + obj = section_value if isinstance(section_value, dict) else section_value.dict() + bc = parse_obj(obj) + constraints[bc.id] = bc + + else: + # Case where the dictionary comes from a configuration file + for section_value in values.values(): + section = parse_obj(section_value) + constraints[section.id] = section + + return {"constraints": constraints} + + def to_config(self) -> t.Mapping[str, t.Any]: + # Constraints are sorted by ID to ensure consistent output for testing + constraints = sorted(self.constraints.values(), key=lambda section: section.id) + return {str(i): section.to_config() for i, section in enumerate(constraints)} + + def insert_constraint(self, constraint: BindingConstraintSection) -> None: + """ + Insert a new constraint into the configuration. + """ + if constraint.id in self.constraints: + raise ValueError(f"Constraint '{constraint.id}' already exists in the configuration.") + self.constraints[constraint.id] = constraint + + def remove_constraint(self, constraint_id: str) -> None: + """ + Remove a constraint from the configuration. + """ + if constraint_id not in self.constraints: + raise ValueError(f"Constraint '{constraint_id}' does not exist in the configuration.") + del self.constraints[constraint_id] + + def update_constraint(self, constraint: BindingConstraintSection) -> None: + """ + Update an existing constraint in the configuration. + """ + if constraint.id not in self.constraints: + raise ValueError(f"Constraint '{constraint.id}' does not exist in the configuration.") + self.constraints[constraint.id] = constraint + + def get_constraint(self, constraint_id: str) -> BindingConstraintSection: + """ + Get a constraint from the configuration. 
+ """ + if constraint_id not in self.constraints: + raise ValueError(f"Constraint '{constraint_id}' does not exist in the configuration.") + return self.constraints[constraint_id] diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py index 37e0badb0c..909140b68a 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py +++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py @@ -68,7 +68,7 @@ class HourlyMatrixSerializer(IDateMatrixSerializer): def build_date(self, index: pd.Index) -> pd.DataFrame: def _map(row: str) -> Tuple[str, int, str, str, str]: - m, d, h = re.split("[\s/]", row) + m, d, h = re.split(r"[\s/]", row) return "", 1, d, IDateMatrixSerializer._R_MONTHS[m], h items = index.map(_map).tolist() From ad9ea058c12820687641a487e122168f0fc47a2d Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Sun, 3 Mar 2024 19:06:05 +0100 Subject: [PATCH 085/147] feat(tablemode): integrate binding constraints manager in the table mode --- .../business/binding_constraint_management.py | 100 ++++++++++++------ .../study/business/table_mode_management.py | 5 +- .../filesystem/config/binding_constraint.py | 33 +++--- .../study_data_blueprint/test_table_mode.py | 79 ++++++++++++++ 4 files changed, 166 insertions(+), 51 deletions(-) diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py index b642a26f79..26cbd91467 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -2,14 +2,15 @@ import itertools import json import logging -from typing import Any, Dict, List, Mapping, MutableSequence, Optional, Sequence, Tuple, Union +import typing as t import numpy as np from pydantic import BaseModel, Field, root_validator, validator from requests.utils import CaseInsensitiveDict from antarest.core.exceptions import ( - BindingConstraintNotFoundError, + BindingConstraintNotFound, + ConfigFileNotFound, ConstraintAlreadyExistError, ConstraintIdNotFoundError, DuplicateConstraintName, @@ -27,6 +28,7 @@ from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( BindingConstraintFrequency, BindingConstraintOperator, + BindingConstraintProperties as ConfigBCProperties, ) from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy @@ -109,13 +111,13 @@ class ConstraintTerm(BaseModel): data: the constraint term data (link or cluster), if any. 
""" - id: Optional[str] - weight: Optional[float] - offset: Optional[int] - data: Optional[Union[LinkTerm, ClusterTerm]] + id: t.Optional[str] + weight: t.Optional[float] + offset: t.Optional[int] + data: t.Optional[t.Union[LinkTerm, ClusterTerm]] @validator("id") - def id_to_lower(cls, v: Optional[str]) -> Optional[str]: + def id_to_lower(cls, v: t.Optional[str]) -> t.Optional[str]: """Ensure the ID is lower case.""" if v is None: return None @@ -146,11 +148,11 @@ class ConstraintFilters(BaseModel, frozen=True, extra="forbid"): """ bc_id: str = "" - enabled: Optional[bool] = None - operator: Optional[BindingConstraintOperator] = None + enabled: t.Optional[bool] = None + operator: t.Optional[BindingConstraintOperator] = None comments: str = "" group: str = "" - time_step: Optional[BindingConstraintFrequency] = None + time_step: t.Optional[BindingConstraintFrequency] = None area_name: str = "" cluster_name: str = "" link_id: str = "" @@ -236,7 +238,7 @@ class ConstraintInput870(OptionalProperties): @camel_case_model class ConstraintInput(BindingConstraintMatrices, ConstraintInput870): - terms: MutableSequence[ConstraintTerm] = Field( + terms: t.MutableSequence[ConstraintTerm] = Field( default_factory=lambda: [], ) @@ -246,7 +248,7 @@ class ConstraintCreation(ConstraintInput): name: str @root_validator(pre=True) - def check_matrices_dimensions(cls, values: Dict[str, Any]) -> Dict[str, Any]: + def check_matrices_dimensions(cls, values: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]: for _key in ["time_step"] + TERM_MATRICES: _camel = to_camel_case(_key) values[_key] = values.pop(_camel, values.get(_key)) @@ -300,7 +302,7 @@ def check_matrices_dimensions(cls, values: Dict[str, Any]) -> Dict[str, Any]: class ConstraintOutputBase(BindingConstraintPropertiesBase): id: str name: str - terms: MutableSequence[ConstraintTerm] = Field(default_factory=lambda: []) + terms: t.MutableSequence[ConstraintTerm] = Field(default_factory=lambda: []) @camel_case_model @@ -316,11 +318,11 @@ class ConstraintOutput870(ConstraintOutput830): # WARNING: Do not change the order of the following line, it is used to determine # the type of the output constraint in the FastAPI endpoint. -ConstraintOutput = Union[ConstraintOutputBase, ConstraintOutput830, ConstraintOutput870] +ConstraintOutput = t.Union[ConstraintOutputBase, ConstraintOutput830, ConstraintOutput870] def _get_references_by_widths( - file_study: FileStudy, bcs: Sequence[ConstraintOutput] + file_study: FileStudy, bcs: t.Sequence[ConstraintOutput] ) -> Mapping[int, Sequence[Tuple[str, str]]]: """ Iterates over each BC and its associated matrices. 
@@ -336,7 +338,7 @@ def _get_references_by_widths( else: matrix_id_fmts = {"{bc_id}_eq", "{bc_id}_lt", "{bc_id}_gt"} - references_by_width: Dict[int, List[Tuple[str, str]]] = {} + references_by_width: t.Dict[int, t.List[t.Tuple[str, str]]] = {} _total = len(bcs) * len(matrix_id_fmts) for _index, (bc, fmt) in enumerate(itertools.product(bcs, matrix_id_fmts), 1): bc_id = bc.id @@ -389,6 +391,10 @@ def _validate_binding_constraints(file_study: FileStudy, bcs: Sequence[Constrain return True +# noinspection SpellCheckingInspection +_ALL_BINDING_CONSTRAINTS_PATH = "input/bindingconstraints/bindingconstraints" + + class BindingConstraintManager: def __init__( self, @@ -397,7 +403,7 @@ def __init__( self.storage_service = storage_service @staticmethod - def parse_and_add_terms(key: str, value: Any, adapted_constraint: ConstraintOutput) -> None: + def parse_and_add_terms(key: str, value: t.Any, adapted_constraint: ConstraintOutput) -> None: """Parse a single term from the constraint dictionary and add it to the adapted_constraint model.""" if "%" in key or "." in key: separator = "%" if "%" in key else "." @@ -431,7 +437,7 @@ def parse_and_add_terms(key: str, value: Any, adapted_constraint: ConstraintOutp ) @staticmethod - def constraint_model_adapter(constraint: Mapping[str, Any], version: int) -> ConstraintOutput: + def constraint_model_adapter(constraint: t.Mapping[str, t.Any], version: int) -> ConstraintOutput: """ Adapts a binding constraint configuration to the appropriate model version. @@ -489,7 +495,7 @@ def constraint_model_adapter(constraint: Mapping[str, Any], version: int) -> Con return adapted_constraint @staticmethod - def terms_to_coeffs(terms: Sequence[ConstraintTerm]) -> Dict[str, List[float]]: + def terms_to_coeffs(terms: t.Sequence[ConstraintTerm]) -> t.Dict[str, t.List[float]]: """ Converts a sequence of terms into a dictionary mapping each term's ID to its coefficients, including the weight and, optionally, the offset. @@ -518,7 +524,7 @@ def get_binding_constraint(self, study: Study, bc_id: str) -> ConstraintOutput: A ConstraintOutput object representing the binding constraint with the specified ID. Raises: - BindingConstraintNotFoundError: If no binding constraint with the specified ID is found. + BindingConstraintNotFound: If no binding constraint with the specified ID is found. """ storage_service = self.storage_service.get_storage(study) file_study = storage_service.get_raw(study) @@ -531,13 +537,13 @@ def get_binding_constraint(self, study: Study, bc_id: str) -> ConstraintOutput: constraints_by_id[constraint_config.id] = constraint_config if bc_id not in constraints_by_id: - raise BindingConstraintNotFoundError(f"Binding constraint '{bc_id}' not found") + raise BindingConstraintNotFound(f"Binding constraint '{bc_id}' not found") return constraints_by_id[bc_id] def get_binding_constraints( self, study: Study, filters: ConstraintFilters = ConstraintFilters() - ) -> Sequence[ConstraintOutput]: + ) -> t.Sequence[ConstraintOutput]: """ Retrieves all binding constraints within a given study, optionally filtered by specific criteria. @@ -555,7 +561,7 @@ def get_binding_constraints( filtered_constraints = list(filter(lambda c: filters.match_filters(c), outputs)) return filtered_constraints - def get_grouped_constraints(self, study: Study) -> Mapping[str, Sequence[ConstraintOutput]]: + def get_grouped_constraints(self, study: Study) -> t.Mapping[str, t.Sequence[ConstraintOutput]]: """ Retrieves and groups all binding constraints by their group names within a given study. 
@@ -584,7 +590,7 @@ def get_grouped_constraints(self, study: Study) -> Mapping[str, Sequence[Constra return grouped_constraints - def get_constraints_by_group(self, study: Study, group_name: str) -> Sequence[ConstraintOutput]: + def get_constraints_by_group(self, study: Study, group_name: str) -> t.Sequence[ConstraintOutput]: """ Retrieve all binding constraints belonging to a specified group within a study. @@ -596,12 +602,12 @@ def get_constraints_by_group(self, study: Study, group_name: str) -> Sequence[Co A list of ConstraintOutput objects that belong to the specified group. Raises: - BindingConstraintNotFoundError: If the specified group name is not found among the constraint groups. + BindingConstraintNotFound: If the specified group name is not found among the constraint groups. """ grouped_constraints = self.get_grouped_constraints(study) if group_name not in grouped_constraints: - raise BindingConstraintNotFoundError(f"Group '{group_name}' not found") + raise BindingConstraintNotFound(f"Group '{group_name}' not found") return grouped_constraints[group_name] @@ -622,14 +628,14 @@ def validate_constraint_group(self, study: Study, group_name: str) -> bool: True if the group exists and the constraints within the group are valid; False otherwise. Raises: - BindingConstraintNotFoundError: If no matching group name is found in a case-insensitive manner. + BindingConstraintNotFound: If no matching group name is found in a case-insensitive manner. """ storage_service = self.storage_service.get_storage(study) file_study = storage_service.get_raw(study) grouped_constraints = self.get_grouped_constraints(study) if group_name not in grouped_constraints: - raise BindingConstraintNotFoundError(f"Group '{group_name}' not found") + raise BindingConstraintNotFound(f"Group '{group_name}' not found") constraints = grouped_constraints[group_name] return _validate_binding_constraints(file_study, constraints) @@ -769,7 +775,7 @@ def remove_binding_constraint(self, study: Study, binding_constraint_id: str) -> binding_constraint_id: The ID of the binding constraint to remove. Raises: - BindingConstraintNotFoundError: If no binding constraint with the specified ID is found. + BindingConstraintNotFound: If no binding constraint with the specified ID is found. """ # Check the existence of the binding constraint before removing it bc = self.get_binding_constraint(study, binding_constraint_id) @@ -863,10 +869,38 @@ def remove_constraint_term( ) -> None: return self.update_constraint_term(study, binding_constraint_id, term_id) # type: ignore + def get_all_binding_constraints_props( + self, + study: Study, + ) -> t.Mapping[str, ConstraintOutput]: + """ + Retrieve all binding constraints properties from the study. + + Args: + study: Study from which to retrieve the storages. + + Returns: + A mapping of binding constraint IDs to their properties. + + # Raises: + # STStorageConfigNotFound: If no storages are found in the specified area. 
+ """ + file_study = self.storage_service.get_storage(study).get_raw(study) + + path = _ALL_BINDING_CONSTRAINTS_PATH + try: + bc_config = file_study.tree.get(path.split("/"), depth=3) + except KeyError: + raise ConfigFileNotFound(path) from None + + bc_props = ConfigBCProperties.parse_obj(bc_config) + bc_map = {bc_id: GetBindingConstraintDTO.create_dto(bc) for bc_id, bc in bc_props.constraints.items()} + return bc_map + def _replace_matrices_according_to_frequency_and_version( - data: ConstraintInput, version: int, args: Dict[str, Any] -) -> Dict[str, Any]: + data: ConstraintInput, version: int, args: t.Dict[str, t.Any] +) -> t.Dict[str, t.Any]: if version < 870: if "values" not in args: matrix = { @@ -887,7 +921,7 @@ def _replace_matrices_according_to_frequency_and_version( return args -def find_constraint_term_id(constraints_term: Sequence[ConstraintTerm], constraint_term_id: str) -> int: +def find_constraint_term_id(constraints_term: t.Sequence[ConstraintTerm], constraint_term_id: str) -> int: try: index = [elm.id for elm in constraints_term].index(constraint_term_id) return index @@ -895,7 +929,7 @@ def find_constraint_term_id(constraints_term: Sequence[ConstraintTerm], constrai return -1 -def check_attributes_coherence(data: Union[ConstraintCreation, ConstraintInput], study_version: int) -> None: +def check_attributes_coherence(data: t.Union[ConstraintCreation, ConstraintInput], study_version: int) -> None: if study_version < 870: if data.group: raise InvalidFieldForVersionError( diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index 8f4791aff4..051fc06777 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -702,7 +702,10 @@ def get_table_data( for storage in storages } elif table_type == TableTemplateType.BINDING_CONSTRAINT: - pass + bc_map = self._binding_constraint_manager.get_all_binding_constraints_props(study) + data = {bc_id: bc.dict(by_alias=True) for bc_id, bc in bc_map.items()} + else: # pragma: no cover + raise NotImplementedError(f"Table type {table_type} not implemented") df = pd.DataFrame.from_dict(data, orient="index") if columns: diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py b/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py index ad89278863..cf55af84d8 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py @@ -4,7 +4,7 @@ import json import typing as t -from pydantic import Field, validator, root_validator +from pydantic import Field, root_validator, validator from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import validate_filtering @@ -53,7 +53,7 @@ class AbstractTerm(IniProperties): """ weight: float = 0.0 - offset: float = 0.0 + offset: int = 0 def __str__(self) -> str: """String representation used in configuration files.""" @@ -231,7 +231,7 @@ class BindingConstraintProperties(IniProperties): 'operator': , 'terms': {'at.at_dsr 0': {'area': 'at', 'cluster': 'at_dsr 0', - 'offset': 0.0, + 'offset': 0, 'weight': 6.5}}, 'type': }, {'comments': '', @@ -243,7 +243,7 @@ class BindingConstraintProperties(IniProperties): 'operator': , 'terms': {'be.be_dsr 0': {'area': 'be', 'cluster': 'be_dsr 0', - 'offset': 0.0, + 'offset': 0, 'weight': 8.3}}, 'type': }] 
""" @@ -268,7 +268,7 @@ class BindingConstraintSection(IniProperties): ... "at.cl1": 1, ... "de.cl2": "-88.77%7", ... "at%de": -0.06, - ... "at%es": "8.5%0.5", + ... "at%es": "8.5%5", ... } >>> bc = BindingConstraintProperties.BindingConstraintSection.parse_obj(obj) @@ -282,19 +282,16 @@ class BindingConstraintSection(IniProperties): 'operator': , 'terms': {'at%de': {'area1': 'at', 'area2': 'de', - 'offset': 0.0, + 'offset': 0, 'weight': -0.06}, - 'at%es': {'area1': 'at', - 'area2': 'es', - 'offset': 0.5, - 'weight': 8.5}, + 'at%es': {'area1': 'at', 'area2': 'es', 'offset': 5, 'weight': 8.5}, 'at.cl1': {'area': 'at', 'cluster': 'cl1', - 'offset': 0.0, + 'offset': 0, 'weight': 1.0}, 'de.cl2': {'area': 'de', 'cluster': 'cl2', - 'offset': 7.0, + 'offset': 7, 'weight': -88.77}}, 'type': } @@ -308,10 +305,10 @@ class BindingConstraintSection(IniProperties): >>> pprint(bc2.to_config()) {'at%de': '-0.06', - 'at%es': '8.5%0.5', + 'at%es': '8.5%5', 'at.cl1': '1.0', 'comments': '', - 'de.cl2': '-88.77%7.0', + 'de.cl2': '-88.77%7', 'enabled': True, 'filter-synthesis': 'hourly, annual', 'filter-year-by-year': '', @@ -324,7 +321,7 @@ class BindingConstraintSection(IniProperties): id: str name: str enabled: bool = True - type: BindingConstraintFrequency = BindingConstraintFrequency.HOURLY + time_step: BindingConstraintFrequency = Field(default=BindingConstraintFrequency.HOURLY, alias="type") operator: BindingConstraintOperator = BindingConstraintOperator.EQUAL comments: str = "" filter_synthesis: str = Field(default="hourly, daily, weekly, monthly, annual", alias="filter-synthesis") @@ -343,7 +340,7 @@ def _populate_section(cls, values: t.MutableMapping[str, t.Any]) -> t.MutableMap "id": values.pop("id", None), "name": values.pop("name", None), "enabled": values.pop("enabled", None), - "type": values.pop("type", None), + "type": values.pop("time_step", None), "operator": values.pop("operator", None), "comments": values.pop("comments", None), "filter-synthesis": values.pop("filter_synthesis", None), @@ -352,6 +349,8 @@ def _populate_section(cls, values: t.MutableMapping[str, t.Any]) -> t.MutableMap if new_values["id"] is None: new_values["id"] = _generate_bc_id(new_values["name"]) + if new_values["type"] is None: + new_values["type"] = values.pop("type", None) if new_values["filter-synthesis"] is None: new_values["filter-synthesis"] = values.pop("filter-synthesis", None) if new_values["filter-year-by-year"] is None: @@ -386,7 +385,7 @@ def to_config(self) -> t.Mapping[str, t.Any]: "id": self.id, "name": self.name, "enabled": self.enabled, - "type": self.type, + "type": self.time_step, "operator": self.operator, "comments": self.comments, "filter-synthesis": self.filter_synthesis, diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index 6a5a39c976..5582b26629 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -439,3 +439,82 @@ def test_lifecycle__nominal( } actual = res.json() assert actual == expected + + # Table Mode - Binding Constraints + # ================================ + + # Prepare data for binding constraints tests + # Create a cluster in fr + fr_id = "fr" + res = client.post( + f"/v1/studies/{study_id}/areas/{fr_id}/clusters/thermal", + headers=user_headers, + json={ + "name": "Cluster 1", + "group": "Nuclear", + }, + ) + assert res.status_code == 200, res.json() + cluster_id = res.json()["id"] + assert cluster_id == "Cluster 1" + 
+ # Create Binding Constraints + res = client.post( + f"/v1/studies/{study_id}/bindingconstraints", + json={ + "name": "Binding Constraint 1", + "enabled": True, + "time_step": "hourly", + "operator": "less", + "coeffs": {}, + }, + headers=user_headers, + ) + assert res.status_code == 200, res.json() + + res = client.post( + f"/v1/studies/{study_id}/bindingconstraints", + json={ + "name": "Binding Constraint 2", + "enabled": False, + "time_step": "daily", + "operator": "greater", + "coeffs": {}, + "comments": "This is a binding constraint", + "filter_synthesis": "hourly, daily, weekly", + }, + headers=user_headers, + ) + assert res.status_code == 200, res.json() + + res = client.get( + f"/v1/studies/{study_id}/tablemode", + headers=user_headers, + params={ + "table_type": "binding constraint", + "columns": "", + }, + ) + assert res.status_code == 200, res.json() + expected = { + "binding constraint 1": { + "comments": "", + "enabled": True, + "filterSynthesis": "hourly, daily, weekly, monthly, " "annual", + "filterYearByYear": "hourly, daily, weekly, monthly, " "annual", + "name": "Binding Constraint 1", + "operator": "less", + "timeStep": "hourly", + }, + "binding constraint 2": { + "comments": "This is a binding constraint", + "enabled": False, + "filterSynthesis": "hourly, daily, weekly", + "filterYearByYear": "hourly, daily, weekly, monthly, " "annual", + "name": "Binding Constraint 2", + "operator": "greater", + "timeStep": "daily", + }, + } + actual = res.json() + assert actual == expected From ab9c0d9be5cdc4a416157a0f618447d6414b080a Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Wed, 13 Mar 2024 16:49:06 +0100 Subject: [PATCH 086/147] feat(tablemode): the `columns` parameter is now optional --- .../study_data_blueprint/test_table_mode.py | 18 +----------------- 1 file changed, 1 insertion(+), 17 deletions(-) diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index 5582b26629..23621c0184 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -59,23 +59,7 @@ def test_lifecycle__nominal( res = client.get( f"/v1/studies/{study_id}/tablemode", headers=user_headers, - params={ - "table_type": "area", - "columns": ",".join( - [ - "nonDispatchablePower", - "dispatchableHydroPower", - "otherDispatchablePower", - "averageUnsuppliedEnergyCost", - "spreadUnsuppliedEnergyCost", - "averageSpilledEnergyCost", - "spreadSpilledEnergyCost", - "filterSynthesis", - "filterYearByYear", - "adequacyPatchMode", - ] - ), - }, + params={"table_type": "area"}, ) assert res.status_code == 200, res.json() expected = { From b92b81229d5ee419e4ade26852373855eb45610b Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Wed, 13 Mar 2024 16:49:19 +0100 Subject: [PATCH 087/147] feat(tablemode): the `columns` parameter is now optional --- antarest/study/web/study_data_blueprint.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index 2133d9819f..c8df7bbbf6 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -846,7 +846,7 @@ def set_timeseries_form_values( def get_table_mode( uuid: str, table_type: TableTemplateType, - columns: str, + columns: str = "", current_user: JWTUser = Depends(auth.get_current_user), ) -> TableDataDTO: logger.info( From eaa04980e25b9fedc49913e5a31efec66f7ad1e9 Mon Sep 
17 00:00:00 2001 From: Laurent LAPORTE Date: Fri, 15 Mar 2024 23:19:49 +0100 Subject: [PATCH 088/147] feat(tablemode): add support for binding constraints v8.7 --- antarest/core/exceptions.py | 2 +- antarest/study/business/area_management.py | 2 +- .../business/binding_constraint_management.py | 31 +- .../study/business/table_mode_management.py | 16 +- .../filesystem/config/binding_constraint.py | 460 ------------------ .../command/remove_binding_constraint.py | 2 +- antarest/study/web/study_data_blueprint.py | 8 +- .../test_binding_constraints.py | 6 +- .../study_data_blueprint/test_table_mode.py | 10 +- 9 files changed, 23 insertions(+), 514 deletions(-) diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py index ac6a980ba1..87804de393 100644 --- a/antarest/core/exceptions.py +++ b/antarest/core/exceptions.py @@ -366,7 +366,7 @@ def __init__(self, message: str) -> None: super().__init__(HTTPStatus.BAD_REQUEST, message) -class BindingConstraintNotFoundError(HTTPException): +class BindingConstraintNotFound(HTTPException): def __init__(self, message: str) -> None: super().__init__(HTTPStatus.NOT_FOUND, message) diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py index b03f0cdae9..9c4967ca66 100644 --- a/antarest/study/business/area_management.py +++ b/antarest/study/business/area_management.py @@ -5,7 +5,7 @@ from pydantic import BaseModel, Extra, Field -from antarest.core.exceptions import DuplicateAreaName, ConfigFileNotFound, LayerNotAllowedToBeDeleted, LayerNotFound +from antarest.core.exceptions import ConfigFileNotFound, DuplicateAreaName, LayerNotAllowedToBeDeleted, LayerNotFound from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands from antarest.study.model import Patch, PatchArea, PatchCluster, RawStudy, Study from antarest.study.repository import StudyMetadataRepository diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py index 26cbd91467..6dca21b306 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -14,6 +14,7 @@ ConstraintAlreadyExistError, ConstraintIdNotFoundError, DuplicateConstraintName, + IncoherenceBetweenMatricesLength, InvalidConstraintName, InvalidFieldForVersionError, MatrixWidthMismatchError, @@ -28,7 +29,6 @@ from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( BindingConstraintFrequency, BindingConstraintOperator, - BindingConstraintProperties as ConfigBCProperties, ) from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy @@ -45,7 +45,6 @@ from antarest.study.storage.variantstudy.business.matrix_constants.binding_constraint.series_before_v87 import ( default_bc_weekly_daily as default_bc_weekly_daily_86, ) -from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator from antarest.study.storage.variantstudy.model.command.create_binding_constraint import ( DEFAULT_GROUP, EXPECTED_MATRIX_SHAPES, @@ -869,34 +868,6 @@ def remove_constraint_term( ) -> None: return self.update_constraint_term(study, binding_constraint_id, term_id) # type: ignore - def get_all_binding_constraints_props( - self, - study: Study, - ) -> t.Mapping[str, ConstraintOutput]: - """ - Retrieve all binding constraints properties from the 
study. - - Args: - study: Study from which to retrieve the storages. - - Returns: - A mapping of binding constraint IDs to their properties. - - # Raises: - # STStorageConfigNotFound: If no storages are found in the specified area. - """ - file_study = self.storage_service.get_storage(study).get_raw(study) - - path = _ALL_BINDING_CONSTRAINTS_PATH - try: - bc_config = file_study.tree.get(path.split("/"), depth=3) - except KeyError: - raise ConfigFileNotFound(path) from None - - bc_props = ConfigBCProperties.parse_obj(bc_config) - bc_map = {bc_id: GetBindingConstraintDTO.create_dto(bc) for bc_id, bc in bc_props.constraints.items()} - return bc_map - def _replace_matrices_according_to_frequency_and_version( data: ConstraintInput, version: int, args: t.Dict[str, t.Any] diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index 051fc06777..66379b80c6 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -681,29 +681,29 @@ def get_table_data( f"{area1_id} / {area2_id}": link.dict(by_alias=True) for (area1_id, area2_id), link in links_map.items() } elif table_type == TableTemplateType.THERMAL_CLUSTER: - clusters_map = self._thermal_manager.get_all_thermals_props(study) + thermals_map = self._thermal_manager.get_all_thermals_props(study) data = { f"{area_id} / {cluster.id}": cluster.dict(by_alias=True) - for area_id, clusters in clusters_map.items() + for area_id, clusters in thermals_map.items() for cluster in clusters } elif table_type == TableTemplateType.RENEWABLE_CLUSTER: - clusters_map = self._renewable_manager.get_all_renewables_props(study) + renewables_map = self._renewable_manager.get_all_renewables_props(study) data = { f"{area_id} / {cluster.id}": cluster.dict(by_alias=True) - for area_id, clusters in clusters_map.items() + for area_id, clusters in renewables_map.items() for cluster in clusters } elif table_type == TableTemplateType.ST_STORAGE: - storage_map = self._st_storage_manager.get_all_storages_props(study) + storages_map = self._st_storage_manager.get_all_storages_props(study) data = { f"{area_id} / {storage.id}": storage.dict(by_alias=True) - for area_id, storages in storage_map.items() + for area_id, storages in storages_map.items() for storage in storages } elif table_type == TableTemplateType.BINDING_CONSTRAINT: - bc_map = self._binding_constraint_manager.get_all_binding_constraints_props(study) - data = {bc_id: bc.dict(by_alias=True) for bc_id, bc in bc_map.items()} + bc_seq = self._binding_constraint_manager.get_binding_constraints(study) + data = {bc.id: bc.dict(by_alias=True, exclude={"id", "name", "terms"}) for bc in bc_seq} else: # pragma: no cover raise NotImplementedError(f"Table type {table_type} not implemented") diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py b/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py index cf55af84d8..11749cf456 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py @@ -1,14 +1,8 @@ """ Object model used to read and update binding constraint configuration. 
""" -import json -import typing as t - -from pydantic import Field, root_validator, validator from antarest.study.business.enum_ignore_case import EnumIgnoreCase -from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import validate_filtering -from antarest.study.storage.rawstudy.model.filesystem.config.ini_properties import IniProperties class BindingConstraintFrequency(EnumIgnoreCase): @@ -41,457 +35,3 @@ class BindingConstraintOperator(EnumIgnoreCase): GREATER = "greater" BOTH = "both" EQUAL = "equal" - - -class AbstractTerm(IniProperties): - """ - Abstract term of a binding constraint. - - Attributes: - weight: weight of the term - offset: offset of the term - """ - - weight: float = 0.0 - offset: int = 0 - - def __str__(self) -> str: - """String representation used in configuration files.""" - term_id = self.bc_id - value = self.bc_value - return f"{term_id} = {value}" if term_id else value - - @property - def bc_id(self) -> str: - """Return the constraint term ID for this constraint.""" - # Method should be overridden in child class. - # It is implemented here to avoid raising an error in the debugger. - return "" - - @property - def bc_value(self) -> str: - """Return the constraint term value for this constraint.""" - return f"{self.weight}%{self.offset}" if self.offset else str(self.weight) - - -class LinkTerm(AbstractTerm): - """ - Term of a binding constraint applied to a link. - - Attributes: - weight: weight of the term - offset: offset of the term - area1_id: ID of the first area - area2_id: ID of the second area - """ - - area1_id: str = Field(alias="area1") - area2_id: str = Field(alias="area2") - - @property - def bc_id(self) -> str: - """ - Return the constraint term ID for this constraint on a link, - of the form "area1%area2". - """ - # Ensure IDs are in alphabetical order and lower case - ids = sorted((self.area1_id.lower(), self.area2_id.lower())) - return "%".join(ids) - - -class ClusterTerm(AbstractTerm): - """ - Term of a binding constraint applied to a thermal cluster. - - Attributes: - weight: weight of the term - offset: offset of the term - area_id: ID of the area - cluster_id: ID of the cluster - """ - - area_id: str = Field(alias="area") - cluster_id: str = Field(alias="cluster") - - @property - def bc_id(self) -> str: - """ - Return the constraint term ID for this constraint on thermal cluster, - of the form "area.cluster". - """ - # Ensure IDs are in lower case - ids = [self.area_id.lower(), self.cluster_id.lower()] - return ".".join(ids) - - -BindingConstraintTerm = t.Union[LinkTerm, ClusterTerm] -""" -This type represents the list of possible term types for a binding constraint. -This union can be extended with new term types in the future. -""" - - -def build_term_from_config(term_id: str, value: t.Union[str, int, float]) -> BindingConstraintTerm: - """ - Create a term from a string extracted from the configuration file. - """ - # Extract the weight and offset from the value - if isinstance(value, (int, float)): - weight, offset = float(value), 0.0 - else: - weight, offset = map(float, value.split("%")) if "%" in value else (float(value), 0.0) - - # Parse the term ID - if "%" in term_id: - # - Link: "{area1_id}%{area2_id} = {weight}" - # - Link with offset: "{area1_id}%{area2_id} = {weight}%{offset}" - area1_id, area2_id = term_id.split("%") - return LinkTerm(weight=weight, offset=offset, area1_id=area1_id, area2_id=area2_id) - - elif "." 
in term_id: - # - Cluster: "{area_id}.{cluster_id} = {weight}" - # - Cluster with offset: "{area_id}.{cluster_id} = {weight}%{offset}" - area_id, cluster_id = term_id.split(".") - return ClusterTerm(weight=weight, offset=offset, area_id=area_id, cluster_id=cluster_id) - - else: - raise ValueError(f"Invalid term ID: {term_id}") - - -def build_term_from_obj(obj: t.Mapping[str, t.Any]) -> BindingConstraintTerm: - """ - Create a term from a dictionary extracted from another object. - """ - for cls in BindingConstraintTerm.__args__: # type: ignore - try: - return t.cast(BindingConstraintTerm, cls.parse_obj(obj)) - except ValueError: - pass - raise ValueError(f"Invalid term object: {obj!r}") - - -def _generate_bc_id(name: t.Optional[str]) -> t.Optional[str]: - """ - Generate a binding constraint ID from the name. - Return ``None`` if the name is not set or invalid. - """ - # If the name is not set, return None - if not name: - return None - - # Lazy import to avoid circular import - from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id - - bc_id = transform_name_to_id(name, lower=True) - return bc_id or None # Ensure None if empty string - - -# noinspection SpellCheckingInspection -class BindingConstraintProperties(IniProperties): - """ - Configuration read from the `input/bindingconstraints/bindingconstraints.ini` file. - - This file contains a section for each binding constraint. - Section names correspond to a 0-based index in the list of constraints. - - But, since each binding constraint has a unique ID, we use a mapping of IDs to sections. - - >>> from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintProperties - >>> from pprint import pprint - - Create and validate a new BindingConstraintProperties from a dictionary read from a configuration file. - - >>> obj = { - ... "0": { - ... "name": "DSR_AT_stock", - ... "id": "dsr_at_stock", - ... "enabled": True, - ... "type": "daily", - ... "operator": "less", - ... "at.at_dsr 0": 6.5, - ... }, - ... "1": { - ... "name": "DSR_BE_stock", - ... "enabled": False, - ... "type": "daily", - ... "operator": "greater", - ... "be.be_dsr 0": 8.3, - ... }, - ... } - - >>> bc = BindingConstraintProperties.parse_obj(obj) - >>> constraints = sorted(bc.constraints.values(), key=lambda s: s.id) - >>> pprint([s.dict(by_alias=True) for s in constraints]) - [{'comments': '', - 'enabled': True, - 'filter-synthesis': 'hourly, daily, weekly, monthly, annual', - 'filter-year-by-year': 'hourly, daily, weekly, monthly, annual', - 'id': 'dsr_at_stock', - 'name': 'DSR_AT_stock', - 'operator': , - 'terms': {'at.at_dsr 0': {'area': 'at', - 'cluster': 'at_dsr 0', - 'offset': 0, - 'weight': 6.5}}, - 'type': }, - {'comments': '', - 'enabled': False, - 'filter-synthesis': 'hourly, daily, weekly, monthly, annual', - 'filter-year-by-year': 'hourly, daily, weekly, monthly, annual', - 'id': 'dsr_be_stock', - 'name': 'DSR_BE_stock', - 'operator': , - 'terms': {'be.be_dsr 0': {'area': 'be', - 'cluster': 'be_dsr 0', - 'offset': 0, - 'weight': 8.3}}, - 'type': }] - """ - - class BindingConstraintSection(IniProperties): - """ - Configuration read from a section in the `input/bindingconstraints/bindingconstraints.ini` file. - - >>> from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintProperties - >>> from pprint import pprint - - Create and validate a new BindingConstraintSection from a dictionary read from a configuration file. 
- - >>> obj = { - ... "name": "FB001", - ... "id": "fb001", - ... "enabled": True, - ... "type": "hourly", - ... "operator": "less", - ... "filter-synthesis": "hourly, annual", - ... "filter-year-by-year": "", - ... "at.cl1": 1, - ... "de.cl2": "-88.77%7", - ... "at%de": -0.06, - ... "at%es": "8.5%5", - ... } - - >>> bc = BindingConstraintProperties.BindingConstraintSection.parse_obj(obj) - >>> pprint(bc.dict(by_alias=True)) - {'comments': '', - 'enabled': True, - 'filter-synthesis': 'hourly, annual', - 'filter-year-by-year': '', - 'id': 'fb001', - 'name': 'FB001', - 'operator': , - 'terms': {'at%de': {'area1': 'at', - 'area2': 'de', - 'offset': 0, - 'weight': -0.06}, - 'at%es': {'area1': 'at', 'area2': 'es', 'offset': 5, 'weight': 8.5}, - 'at.cl1': {'area': 'at', - 'cluster': 'cl1', - 'offset': 0, - 'weight': 1.0}, - 'de.cl2': {'area': 'de', - 'cluster': 'cl2', - 'offset': 7, - 'weight': -88.77}}, - 'type': } - - We can construct a BindingConstraintSection from a dictionary. - - >>> bc2 = BindingConstraintProperties.BindingConstraintSection.parse_obj(bc.dict()) - >>> bc2 == bc - True - - Convert the BindingConstraintSection to a dictionary for writing to a configuration file. - - >>> pprint(bc2.to_config()) - {'at%de': '-0.06', - 'at%es': '8.5%5', - 'at.cl1': '1.0', - 'comments': '', - 'de.cl2': '-88.77%7', - 'enabled': True, - 'filter-synthesis': 'hourly, annual', - 'filter-year-by-year': '', - 'id': 'fb001', - 'name': 'FB001', - 'operator': 'less', - 'type': 'hourly'} - """ - - id: str - name: str - enabled: bool = True - time_step: BindingConstraintFrequency = Field(default=BindingConstraintFrequency.HOURLY, alias="type") - operator: BindingConstraintOperator = BindingConstraintOperator.EQUAL - comments: str = "" - filter_synthesis: str = Field(default="hourly, daily, weekly, monthly, annual", alias="filter-synthesis") - filter_year_by_year: str = Field(default="hourly, daily, weekly, monthly, annual", alias="filter-year-by-year") - - terms: t.MutableMapping[str, BindingConstraintTerm] = Field(default_factory=dict) - - @root_validator(pre=True) - def _populate_section(cls, values: t.MutableMapping[str, t.Any]) -> t.MutableMapping[str, t.Any]: - """ - Parse the section properties and terms from the configuration file. 
- """ - - # Extract known properties to leave only terms - new_values = { - "id": values.pop("id", None), - "name": values.pop("name", None), - "enabled": values.pop("enabled", None), - "type": values.pop("time_step", None), - "operator": values.pop("operator", None), - "comments": values.pop("comments", None), - "filter-synthesis": values.pop("filter_synthesis", None), - "filter-year-by-year": values.pop("filter_year_by_year", None), - } - - if new_values["id"] is None: - new_values["id"] = _generate_bc_id(new_values["name"]) - if new_values["type"] is None: - new_values["type"] = values.pop("type", None) - if new_values["filter-synthesis"] is None: - new_values["filter-synthesis"] = values.pop("filter-synthesis", None) - if new_values["filter-year-by-year"] is None: - new_values["filter-year-by-year"] = values.pop("filter-year-by-year", None) - - # Collect terms - new_values["terms"] = terms = {} - if "terms" in values: - for value in values.pop("terms").values(): - obj = value if isinstance(value, dict) else value.dict() - term = build_term_from_obj(obj) - terms[term.bc_id] = term - else: - for term_id, value in values.items(): - term = build_term_from_config(term_id, value) - terms[term.bc_id] = term - - # Drop `None` values so that we can use the default values, but keep "" values - new_values = {k: v for k, v in new_values.items() if v is not None} - - return new_values - - _validate_filtering = validator( - "filter_synthesis", - "filter_year_by_year", - pre=True, - allow_reuse=True, - )(validate_filtering) - - def to_config(self) -> t.Mapping[str, t.Any]: - config_values = { - "id": self.id, - "name": self.name, - "enabled": self.enabled, - "type": self.time_step, - "operator": self.operator, - "comments": self.comments, - "filter-synthesis": self.filter_synthesis, - "filter-year-by-year": self.filter_year_by_year, - } - - for term_id, term in sorted(self.terms.items()): - config_values[term_id] = term.bc_value - - # Convert to a dictionary for writing to a configuration file - config_values = {k: json.loads(json.dumps(v)) for k, v in config_values.items()} - - return config_values - - def insert_term(self, term: BindingConstraintTerm) -> None: - """ - Insert a new term into the section. - """ - term_id = term.bc_id - if term_id in self.terms: - raise ValueError(f"Term '{term_id}' already exists in the binding constraint '{self.id}'.") - self.terms[term_id] = term - - def remove_term(self, term_id: str) -> None: - """ - Remove a term from the section. - """ - if term_id not in self.terms: - raise ValueError(f"Term '{term_id}' does not exist in the binding constraint '{self.id}'.") - del self.terms[term_id] - - def update_term(self, term: BindingConstraintTerm) -> None: - """ - Update an existing term in the section. - """ - term_id = term.bc_id - if term_id not in self.terms: - raise ValueError(f"Term '{term_id}' does not exist in the binding constraint '{self.id}'.") - self.terms[term_id] = term - - def get_term(self, term_id: str) -> BindingConstraintTerm: - """ - Get a term from the section. - """ - if term_id not in self.terms: - raise ValueError(f"Term '{term_id}' does not exist in the binding constraint '{self.id}'.") - return self.terms[term_id] - - constraints: t.MutableMapping[str, BindingConstraintSection] = Field(default_factory=dict) - - @root_validator(pre=True) - def _populate_constraints(cls, values: t.MutableMapping[str, t.Any]) -> t.MutableMapping[str, t.Any]: - """ - Parse the sections from the configuration file. 
- """ - parse_obj = BindingConstraintProperties.BindingConstraintSection.parse_obj - constraints = {} - - if "constraints" in values: - # Case where the dictionary comes from another BindingConstraintProperties object - for section_value in values["constraints"].values(): - obj = section_value if isinstance(section_value, dict) else section_value.dict() - bc = parse_obj(obj) - constraints[bc.id] = bc - - else: - # Case where the dictionary comes from a configuration file - for section_value in values.values(): - section = parse_obj(section_value) - constraints[section.id] = section - - return {"constraints": constraints} - - def to_config(self) -> t.Mapping[str, t.Any]: - # Constraints are sorted by ID to ensure consistent output for testing - constraints = sorted(self.constraints.values(), key=lambda section: section.id) - return {str(i): section.to_config() for i, section in enumerate(constraints)} - - def insert_constraint(self, constraint: BindingConstraintSection) -> None: - """ - Insert a new constraint into the configuration. - """ - if constraint.id in self.constraints: - raise ValueError(f"Constraint '{constraint.id}' already exists in the configuration.") - self.constraints[constraint.id] = constraint - - def remove_constraint(self, constraint_id: str) -> None: - """ - Remove a constraint from the configuration. - """ - if constraint_id not in self.constraints: - raise ValueError(f"Constraint '{constraint_id}' does not exist in the configuration.") - del self.constraints[constraint_id] - - def update_constraint(self, constraint: BindingConstraintSection) -> None: - """ - Update an existing constraint in the configuration. - """ - if constraint.id not in self.constraints: - raise ValueError(f"Constraint '{constraint.id}' does not exist in the configuration.") - self.constraints[constraint.id] = constraint - - def get_constraint(self, constraint_id: str) -> BindingConstraintSection: - """ - Get a constraint from the configuration. 
- """ - if constraint_id not in self.constraints: - raise ValueError(f"Constraint '{constraint_id}' does not exist in the configuration.") - return self.constraints[constraint_id] diff --git a/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py index 25b180c49d..958e9d81f1 100644 --- a/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py @@ -30,7 +30,7 @@ def _apply_config(self, study_data: FileStudyTreeConfig) -> Tuple[CommandOutput, def _apply(self, study_data: FileStudy) -> CommandOutput: if self.id not in [bind.id for bind in study_data.config.bindings]: - return CommandOutput(status=False, message="Binding constraint not found") + return CommandOutput(status=False, message=f"Binding constraint not found: '{self.id}'") binding_constraints = study_data.tree.get(["input", "bindingconstraints", "bindingconstraints"]) new_binding_constraints: JSON = {} index = 0 diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index c8df7bbbf6..0c4bccfc99 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -18,7 +18,7 @@ from antarest.study.business.adequacy_patch_management import AdequacyPatchFormFields from antarest.study.business.advanced_parameters_management import AdvancedParamsFormFields from antarest.study.business.allocation_management import AllocationFormFields, AllocationMatrix -from antarest.study.business.area_management import AreaCreationDTO, AreaInfoDTO, AreaType, AreaUI, LayerInfoDTO +from antarest.study.business.area_management import AreaCreationDTO, AreaInfoDTO, AreaType, LayerInfoDTO from antarest.study.business.areas.hydro_management import InflowStructure, ManagementOptionsFormFields from antarest.study.business.areas.properties_management import PropertiesFormFields from antarest.study.business.areas.renewable_management import ( @@ -59,10 +59,12 @@ from antarest.study.business.timeseries_config_management import TSFormFields from antarest.study.model import PatchArea, PatchCluster from antarest.study.service import StudyService -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id -from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator logger = logging.getLogger(__name__) diff --git a/tests/integration/study_data_blueprint/test_binding_constraints.py b/tests/integration/study_data_blueprint/test_binding_constraints.py index 4da996a229..e07943573d 100644 --- a/tests/integration/study_data_blueprint/test_binding_constraints.py +++ b/tests/integration/study_data_blueprint/test_binding_constraints.py @@ -543,7 +543,7 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st }, headers=user_headers, ) - assert res.status_code == 422 + assert res.status_code == 422, res.json() description = res.json()["description"] assert "cannot fill 'values'" in description assert "'less_term_matrix'" in description @@ -560,11 
+560,11 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st "operator": "less", "terms": [], "comments": "Incoherent matrix with version", - "less_term_matrix": [[]], + "lessTermMatrix": [[]], }, headers=user_headers, ) - assert res.status_code == 422 + assert res.status_code == 422, res.json() description = res.json()["description"] assert description == "You cannot fill a 'matrix_term' as these values refer to v8.7+ studies" diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index 23621c0184..bfbf4c72bb 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -450,7 +450,6 @@ def test_lifecycle__nominal( "enabled": True, "time_step": "hourly", "operator": "less", - "coeffs": {}, }, headers=user_headers, ) @@ -463,7 +462,6 @@ def test_lifecycle__nominal( "enabled": False, "time_step": "daily", "operator": "greater", - "coeffs": {}, "comments": "This is a binding constraint", "filter_synthesis": "hourly, daily, weekly", }, @@ -484,9 +482,8 @@ def test_lifecycle__nominal( "binding constraint 1": { "comments": "", "enabled": True, - "filterSynthesis": "hourly, daily, weekly, monthly, " "annual", - "filterYearByYear": "hourly, daily, weekly, monthly, " "annual", - "name": "Binding Constraint 1", + "filterSynthesis": "", + "filterYearByYear": "", "operator": "less", "timeStep": "hourly", }, @@ -494,8 +491,7 @@ def test_lifecycle__nominal( "comments": "This is a binding constraint", "enabled": False, "filterSynthesis": "hourly, daily, weekly", - "filterYearByYear": "hourly, daily, weekly, monthly, " "annual", - "name": "Binding Constraint 2", + "filterYearByYear": "", "operator": "greater", "timeStep": "daily", }, From 863ef2642a8ba81b761d8b823edfb5f35aa86ead Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Sat, 30 Mar 2024 16:29:55 +0100 Subject: [PATCH 089/147] feat(tablemode): change API route to "/v1/studies/{uuid}/table-mode/{table_type}" --- .../study/business/table_mode_management.py | 12 +++---- antarest/study/web/study_data_blueprint.py | 4 +-- .../study_data_blueprint/test_table_mode.py | 33 +++++-------------- 3 files changed, 17 insertions(+), 32 deletions(-) diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index 66379b80c6..f6db14cba8 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -37,12 +37,12 @@ class TableTemplateType(EnumIgnoreCase): - AREA = "area" - LINK = "link" - THERMAL_CLUSTER = "thermal cluster" - RENEWABLE_CLUSTER = "renewable cluster" - ST_STORAGE = "short-term storage" - BINDING_CONSTRAINT = "binding constraint" + AREA = "areas" + LINK = "links" + THERMAL_CLUSTER = "thermals" + RENEWABLE_CLUSTER = "renewables" + ST_STORAGE = "storages" + BINDING_CONSTRAINT = "constraints" class AreaColumns(FormFieldsBaseModel, metaclass=AllOptionalMetaclass): diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index 0c4bccfc99..2a9777801d 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -841,7 +841,7 @@ def set_timeseries_form_values( study_service.ts_config_manager.set_field_values(study, field_values) @bp.get( - path="/studies/{uuid}/tablemode", + path="/studies/{uuid}/table-mode/{table_type}", tags=[APITag.study_data], summary="Get table data for 
table form", ) @@ -862,7 +862,7 @@ def get_table_mode( return table_data @bp.put( - path="/studies/{uuid}/tablemode", + path="/studies/{uuid}/table-mode/{table_type}", tags=[APITag.study_data], summary="Set table data with values from table form", ) diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index bfbf4c72bb..cb0f81fa2f 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -56,11 +56,7 @@ def test_lifecycle__nominal( # Table Mode - Area # ================= - res = client.get( - f"/v1/studies/{study_id}/tablemode", - headers=user_headers, - params={"table_type": "area"}, - ) + res = client.get(f"/v1/studies/{study_id}/table-mode/areas", headers=user_headers) assert res.status_code == 200, res.json() expected = { "de": { @@ -119,10 +115,9 @@ def test_lifecycle__nominal( # ================== res = client.get( - f"/v1/studies/{study_id}/tablemode", + f"/v1/studies/{study_id}/table-mode/links", headers=user_headers, params={ - "table_type": "link", "columns": ",".join( [ "hurdlesCost", @@ -189,12 +184,9 @@ def test_lifecycle__nominal( # ============================= res = client.get( - f"/v1/studies/{study_id}/tablemode", + f"/v1/studies/{study_id}/table-mode/thermals", headers=user_headers, - params={ - "table_type": "thermal cluster", - "columns": ",".join(["group", "unitCount", "nominalCapacity", "so2"]), - }, + params={"columns": ",".join(["group", "unitCount", "nominalCapacity", "so2"])}, ) assert res.status_code == 200, res.json() expected = { @@ -302,12 +294,9 @@ def test_lifecycle__nominal( res.raise_for_status() res = client.get( - f"/v1/studies/{study_id}/tablemode", + f"/v1/studies/{study_id}/table-mode/renewables", headers=user_headers, - params={ - "table_type": "renewable cluster", - "columns": ",".join(["group", "enabled", "unitCount", "nominalCapacity"]), - }, + params={"columns": ",".join(["group", "enabled", "unitCount", "nominalCapacity"])}, ) assert res.status_code == 200, res.json() expected = { @@ -375,10 +364,9 @@ def test_lifecycle__nominal( res.raise_for_status() res = client.get( - f"/v1/studies/{study_id}/tablemode", + f"/v1/studies/{study_id}/table-mode/storages", headers=user_headers, params={ - "table_type": "short-term storage", "columns": ",".join( [ "group", @@ -470,12 +458,9 @@ def test_lifecycle__nominal( assert res.status_code == 200, res.json() res = client.get( - f"/v1/studies/{study_id}/tablemode", + f"/v1/studies/{study_id}/table-mode/constraints", headers=user_headers, - params={ - "table_type": "binding constraint", - "columns": "", - }, + params={"columns": ""}, ) assert res.status_code == 200, res.json() expected = { From 2ee0fdb7ac10978e3dbce6617ae885b49c0f38e7 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Sun, 31 Mar 2024 16:48:52 +0200 Subject: [PATCH 090/147] feat(config): rgb color field is converted to "#RRGGBB" format --- .../rawstudy/model/filesystem/config/area.py | 57 ++++++++++--------- .../rawstudy/model/filesystem/config/links.py | 10 ++-- 2 files changed, 34 insertions(+), 33 deletions(-) diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/area.py b/antarest/study/storage/rawstudy/model/filesystem/config/area.py index 5fdd6fcf88..d30c2b0a2a 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/area.py @@ -1,6 +1,7 @@ """ Object model used to read and update area 
configuration. """ + import typing as t import typing_extensions as te @@ -40,7 +41,7 @@ class OptimizationProperties(IniProperties): ... }, ... } - >>> opt = OptimizationProperties.parse_obj(obj) + >>> opt = OptimizationProperties(**obj) >>> pprint(opt.dict(by_alias=True), width=80) {'filtering': {'filter-synthesis': 'hourly, daily, weekly, monthly, annual', @@ -149,21 +150,21 @@ class AreaUI(IniProperties): ... "color_g": 128, ... "color_b": 255, ... } - >>> ui = AreaUI.parse_obj(obj) + >>> ui = AreaUI(**obj) >>> pprint(ui.dict(by_alias=True), width=80) - {'colorRgb': (0, 128, 255), 'x': 1148, 'y': 144} + {'colorRgb': '#0080FF', 'x': 1148, 'y': 144} Update the color: >>> ui.color_rgb = (192, 168, 127) >>> pprint(ui.dict(by_alias=True), width=80) - {'colorRgb': (192, 168, 127), 'x': 1148, 'y': 144} + {'colorRgb': '#C0A87F', 'x': 1148, 'y': 144} """ x: int = Field(0, description="x coordinate of the area in the map") y: int = Field(0, description="y coordinate of the area in the map") - color_rgb: t.Tuple[int, int, int] = Field( - (230, 108, 44), + color_rgb: str = Field( + "#E66C2C", alias="colorRgb", description="color of the area in the map", ) @@ -183,17 +184,14 @@ def to_config(self) -> t.Mapping[str, t.Any]: >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI >>> from pprint import pprint - >>> ui = AreaUI(x=1148, y=144, color_rgb=(0, 128, 255)) + >>> ui = AreaUI(x=1148, y=144, color_rgb='#0080FF') >>> pprint(ui.to_config(), width=80) {'color_b': 255, 'color_g': 128, 'color_r': 0, 'x': 1148, 'y': 144} """ - return { - "x": self.x, - "y": self.y, - "color_r": self.color_rgb[0], - "color_g": self.color_rgb[1], - "color_b": self.color_rgb[2], - } + r = int(self.color_rgb[1:3], 16) + g = int(self.color_rgb[3:5], 16) + b = int(self.color_rgb[5:7], 16) + return {"x": self.x, "y": self.y, "color_r": r, "color_g": g, "color_b": b} class UIProperties(IniProperties): @@ -209,9 +207,9 @@ class UIProperties(IniProperties): >>> ui = UIProperties() >>> pprint(ui.dict(), width=80) - {'layer_styles': {0: {'color_rgb': (230, 108, 44), 'x': 0, 'y': 0}}, + {'layer_styles': {0: {'color_rgb': '#E66C2C', 'x': 0, 'y': 0}}, 'layers': {0}, - 'style': {'color_rgb': (230, 108, 44), 'x': 0, 'y': 0}} + 'style': {'color_rgb': '#E66C2C', 'x': 0, 'y': 0}} Create and validate a new UI object from a dictionary read from a configuration file. @@ -235,15 +233,15 @@ class UIProperties(IniProperties): ... }, ... } - >>> ui = UIProperties.parse_obj(obj) + >>> ui = UIProperties(**obj) >>> pprint(ui.dict(), width=80) - {'layer_styles': {0: {'color_rgb': (0, 128, 255), 'x': 1148, 'y': 144}, - 4: {'color_rgb': (0, 128, 255), 'x': 1148, 'y': 144}, - 6: {'color_rgb': (192, 168, 99), 'x': 1148, 'y': 144}, - 7: {'color_rgb': (0, 128, 255), 'x': 18, 'y': -22}, - 8: {'color_rgb': (0, 128, 255), 'x': 1148, 'y': 144}}, + {'layer_styles': {0: {'color_rgb': '#0080FF', 'x': 1148, 'y': 144}, + 4: {'color_rgb': '#0080FF', 'x': 1148, 'y': 144}, + 6: {'color_rgb': '#C0A863', 'x': 1148, 'y': 144}, + 7: {'color_rgb': '#0080FF', 'x': 18, 'y': -22}, + 8: {'color_rgb': '#0080FF', 'x': 1148, 'y': 144}}, 'layers': {0, 7}, - 'style': {'color_rgb': (0, 128, 255), 'x': 1148, 'y': 144}} + 'style': {'color_rgb': '#0080FF', 'x': 1148, 'y': 144}} """ @@ -343,7 +341,7 @@ def to_config(self) -> t.Mapping[str, t.Mapping[str, t.Any]]: ... style=AreaUI(x=1148, y=144, color_rgb=(0, 128, 255)), ... layers={0, 7}, ... layer_styles={ - ... 6: AreaUI(x=1148, y=144, color_rgb=(192, 168, 99)), + ... 
6: AreaUI(x=1148, y=144, color_rgb='#C0A863'), ... 7: AreaUI(x=18, y=-22, color_rgb=(0, 128, 255)), ... }) >>> pprint(ui.to_config(), width=80) @@ -368,7 +366,10 @@ def to_config(self) -> t.Mapping[str, t.Mapping[str, t.Any]]: for layer, style in self.layer_styles.items(): obj["layerX"][str(layer)] = style.x obj["layerY"][str(layer)] = style.y - obj["layerColor"][str(layer)] = ", ".join(str(c) for c in style.color_rgb) + r = int(style.color_rgb[1:3], 16) + g = int(style.color_rgb[3:5], 16) + b = int(style.color_rgb[5:7], 16) + obj["layerColor"][str(layer)] = f"{r}, {g}, {b}" return obj @@ -395,9 +396,9 @@ class AreaFolder(IniProperties): 'other_dispatchable_power': True, 'spread_spilled_energy_cost': 0.0, 'spread_unsupplied_energy_cost': 0.0}}, - 'ui': {'layer_styles': {0: {'color_rgb': (230, 108, 44), 'x': 0, 'y': 0}}, + 'ui': {'layer_styles': {0: {'color_rgb': '#E66C2C', 'x': 0, 'y': 0}}, 'layers': {0}, - 'style': {'color_rgb': (230, 108, 44), 'x': 0, 'y': 0}}} + 'style': {'color_rgb': '#E66C2C', 'x': 0, 'y': 0}}} >>> pprint(obj.to_config(), width=80) {'optimization': {'filtering': {'filter-synthesis': 'hourly, daily, weekly, ' @@ -497,7 +498,7 @@ class ThermalAreasProperties(IniProperties): ... "cz": "100.0", ... }, ... } - >>> area = ThermalAreasProperties.parse_obj(obj) + >>> area = ThermalAreasProperties(**obj) >>> pprint(area.dict(), width=80) {'spilled_energy_cost': {'cz': 100.0}, 'unserverd_energy_cost': {'at': 4000.8, diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/links.py b/antarest/study/storage/rawstudy/model/filesystem/config/links.py index c10b91bec0..d20f5274c1 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/links.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/links.py @@ -80,11 +80,11 @@ class LinkProperties(IniProperties): ... "filter-year-by-year": "hourly, daily, weekly, monthly, annual", ... 
} - >>> opt = LinkProperties.parse_obj(obj) + >>> opt = LinkProperties(**obj) >>> pprint(opt.dict(by_alias=True), width=80) {'asset-type': , - 'colorRgb': (80, 192, 255), + 'colorRgb': '#50C0FF', 'display-comments': True, 'filter-synthesis': 'hourly, daily, weekly, monthly, annual', 'filter-year-by-year': 'hourly, daily, weekly, monthly, annual', @@ -108,10 +108,10 @@ class LinkProperties(IniProperties): display_comments: bool = Field(default=True, alias="display-comments") filter_synthesis: str = Field(default="hourly, daily, weekly, monthly, annual", alias="filter-synthesis") filter_year_by_year: str = Field(default="hourly, daily, weekly, monthly, annual", alias="filter-year-by-year") - color_rgb: t.Tuple[int, int, int] = Field( - (112, 112, 112), + color_rgb: str = Field( + "#707070", alias="colorRgb", - description="color of the link in the map", + description="color of the area in the map", ) _validate_filtering = validator( From d064897a0e24d5c50546251f7c9b640efb0e4dde Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Sun, 31 Mar 2024 16:53:54 +0200 Subject: [PATCH 091/147] feat(config): the default value of `filter-synthesis` and `filter-year-by-year` fields is "" --- .../rawstudy/model/filesystem/config/area.py | 16 ++++++---------- .../rawstudy/model/filesystem/config/links.py | 4 ++-- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/area.py b/antarest/study/storage/rawstudy/model/filesystem/config/area.py index d30c2b0a2a..52c70540b0 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/area.py @@ -75,8 +75,8 @@ class OptimizationProperties(IniProperties): class FilteringSection(IniProperties): """Configuration read from section `[filtering]` of `/input/areas//optimization.ini`.""" - filter_synthesis: str = Field("hourly, daily, weekly, monthly, annual", alias="filter-synthesis") - filter_year_by_year: str = Field("hourly, daily, weekly, monthly, annual", alias="filter-year-by-year") + filter_synthesis: str = Field("", alias="filter-synthesis") + filter_year_by_year: str = Field("", alias="filter-year-by-year") _validate_filtering = validator( "filter_synthesis", @@ -387,10 +387,8 @@ class AreaFolder(IniProperties): >>> obj = AreaFolder() >>> pprint(obj.dict(), width=80) {'adequacy_patch': None, - 'optimization': {'filtering': {'filter_synthesis': 'hourly, daily, weekly, ' - 'monthly, annual', - 'filter_year_by_year': 'hourly, daily, weekly, ' - 'monthly, annual'}, + 'optimization': {'filtering': {'filter_synthesis': '', + 'filter_year_by_year': ''}, 'nodal_optimization': {'dispatchable_hydro_power': True, 'non_dispatchable_power': True, 'other_dispatchable_power': True, @@ -401,10 +399,8 @@ class AreaFolder(IniProperties): 'style': {'color_rgb': '#E66C2C', 'x': 0, 'y': 0}}} >>> pprint(obj.to_config(), width=80) - {'optimization': {'filtering': {'filter-synthesis': 'hourly, daily, weekly, ' - 'monthly, annual', - 'filter-year-by-year': 'hourly, daily, weekly, ' - 'monthly, annual'}, + {'optimization': {'filtering': {'filter-synthesis': '', + 'filter-year-by-year': ''}, 'nodal optimization': {'dispatchable-hydro-power': True, 'non-dispatchable-power': True, 'other-dispatchable-power': True, diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/links.py b/antarest/study/storage/rawstudy/model/filesystem/config/links.py index d20f5274c1..1438cb0009 100644 --- 
a/antarest/study/storage/rawstudy/model/filesystem/config/links.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/links.py @@ -106,8 +106,8 @@ class LinkProperties(IniProperties): link_style: str = Field(default="plain", alias="link-style") link_width: int = Field(default=1, alias="link-width") display_comments: bool = Field(default=True, alias="display-comments") - filter_synthesis: str = Field(default="hourly, daily, weekly, monthly, annual", alias="filter-synthesis") - filter_year_by_year: str = Field(default="hourly, daily, weekly, monthly, annual", alias="filter-year-by-year") + filter_synthesis: str = Field(default="", alias="filter-synthesis") + filter_year_by_year: str = Field(default="", alias="filter-year-by-year") color_rgb: str = Field( "#707070", alias="colorRgb", From 70de10822383bdee7f1e7b63d539e7f53b07f420 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Sun, 31 Mar 2024 18:12:28 +0200 Subject: [PATCH 092/147] chore: correct typing in `BindingConstraintManager` class --- antarest/study/business/binding_constraint_management.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py index 6dca21b306..b5037ca3b8 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -538,7 +538,7 @@ def get_binding_constraint(self, study: Study, bc_id: str) -> ConstraintOutput: if bc_id not in constraints_by_id: raise BindingConstraintNotFound(f"Binding constraint '{bc_id}' not found") - return constraints_by_id[bc_id] + return t.cast(ConstraintOutput, constraints_by_id[bc_id]) def get_binding_constraints( self, study: Study, filters: ConstraintFilters = ConstraintFilters() From efeb783f57f526dbc356953d5bd0b0912a038fb2 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Sun, 31 Mar 2024 18:11:26 +0200 Subject: [PATCH 093/147] feat(tablemode): manage links update --- antarest/study/business/link_management.py | 35 ++++++- .../study/business/table_mode_management.py | 80 +++++----------- .../rawstudy/model/filesystem/config/links.py | 23 ++++- .../study_data_blueprint/test_table_mode.py | 96 +++++++++++++------ 4 files changed, 142 insertions(+), 92 deletions(-) diff --git a/antarest/study/business/link_management.py b/antarest/study/business/link_management.py index 60e3ddc947..4d65e51aba 100644 --- a/antarest/study/business/link_management.py +++ b/antarest/study/business/link_management.py @@ -9,6 +9,7 @@ from antarest.study.storage.storage_service import StudyStorageService from antarest.study.storage.variantstudy.model.command.create_link import CreateLink from antarest.study.storage.variantstudy.model.command.remove_link import RemoveLink +from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig _ALL_LINKS_PATH = "input/links" @@ -26,7 +27,7 @@ class LinkInfoDTO(BaseModel): @camel_case_model -class GetLinkDTO(LinkProperties, metaclass=AllOptionalMetaclass): +class GetLinkDTO(LinkProperties, metaclass=AllOptionalMetaclass, use_none=True): """ DTO object use to get the link information. 
""" @@ -105,7 +106,35 @@ def get_all_links_props(self, study: RawStudy) -> t.Mapping[t.Tuple[str, str], G property_map = entries.get("properties") or {} for area2_id, properties_cfg in property_map.items(): area1_id, area2_id = sorted([area1_id, area2_id]) - properties = LinkProperties.parse_obj(properties_cfg) - links_by_ids[(area1_id, area2_id)] = GetLinkDTO.parse_obj(properties.dict(by_alias=False)) + properties = LinkProperties(**properties_cfg) + links_by_ids[(area1_id, area2_id)] = GetLinkDTO(**properties.dict(by_alias=False)) return links_by_ids + + def update_links_props( + self, + study: RawStudy, + update_links_by_ids: t.Mapping[t.Tuple[str, str], GetLinkDTO], + ) -> t.Mapping[t.Tuple[str, str], GetLinkDTO]: + old_links_by_ids = self.get_all_links_props(study) + new_links_by_ids = {} + file_study = self.storage_service.get_storage(study).get_raw(study) + commands = [] + for (area1, area2), update_link_dto in update_links_by_ids.items(): + # Update the link properties. + old_link_dto = old_links_by_ids[(area1, area2)] + new_link_dto = old_link_dto.copy(update=update_link_dto.dict(by_alias=False, exclude_none=True)) + new_links_by_ids[(area1, area2)] = new_link_dto + + # Convert the DTO to a configuration object and update the configuration file. + properties = LinkProperties(**new_link_dto.dict(by_alias=False)) + path = f"{_ALL_LINKS_PATH}/{area1}/properties" + cmd = UpdateConfig( + target=path, + data={area2: properties.to_config()}, + command_context=self.storage_service.variant_study_service.command_factory.command_context, + ) + commands.append(cmd) + + execute_or_add_commands(study, file_study, commands, self.storage_service) + return new_links_by_ids diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index f6db14cba8..f0ad147346 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -9,8 +9,8 @@ from antarest.study.business.areas.thermal_management import ThermalManager from antarest.study.business.binding_constraint_management import BindingConstraintManager from antarest.study.business.enum_ignore_case import EnumIgnoreCase -from antarest.study.business.link_management import LinkManager -from antarest.study.business.utils import AllOptionalMetaclass, FormFieldsBaseModel, execute_or_add_commands +from antarest.study.business.link_management import GetLinkDTO, LinkManager +from antarest.study.business.utils import AllOptionalMetaclass, FormFieldsBaseModel from antarest.study.common.default_values import FilteringOptions, LinkProperties, NodalOptimization from antarest.study.model import RawStudy from antarest.study.storage.rawstudy.model.filesystem.config.area import AdequacyPatchMode @@ -21,9 +21,6 @@ from antarest.study.storage.rawstudy.model.filesystem.config.links import AssetType, TransmissionCapacity from antarest.study.storage.rawstudy.model.filesystem.config.thermal import LawOption, LocalTSGenerationBehavior from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.variantstudy.model.command.icommand import ICommand -from antarest.study.storage.variantstudy.model.command.update_binding_constraint import UpdateBindingConstraint -from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig AREA_PATH = "input/areas/{area}" THERMAL_PATH = "input/thermal/areas" @@ -711,7 +708,7 @@ def get_table_data( if columns: # Create a new dataframe with the listed columns. 
# If a column does not exist in the DataFrame, it is created with empty values. - df = pd.DataFrame(df, columns=columns) + df = pd.DataFrame(df, columns=columns) # type: ignore df = df.where(pd.notna(df), other=None) obj = df.to_dict(orient="index") @@ -722,7 +719,7 @@ def get_table_data( if pd.isna(value): row[key] = None - return obj + return t.cast(TableDataDTO, obj) file_study = self.storage_service.get_storage(study).get_raw(study) columns_model = COLUMNS_MODELS_BY_TYPE[table_type] @@ -759,50 +756,25 @@ def set_table_data( self, study: RawStudy, table_type: TableTemplateType, - data: t.Dict[str, ColumnsModelTypes], - ) -> None: - commands: t.List[ICommand] = [] - bindings_by_id = None - command_context = self.storage_service.variant_study_service.command_factory.command_context - - for key, columns in data.items(): - path_vars = _get_path_vars_from_key(table_type, key) - - if table_type == TableTemplateType.BINDING_CONSTRAINT: - file_study = self.storage_service.get_storage(study).get_raw(study) - bindings_by_id = bindings_by_id or { - binding["id"]: binding for binding in _get_glob_object(file_study, table_type).values() - } - binding_id = path_vars["id"] - current_binding = bindings_by_id.get(binding_id, None) - - if current_binding: - col_values = columns.dict(exclude_none=True) - current_binding_dto = BindingConstraintManager.constraint_model_adapter( - current_binding, int(study.version) - ) - - commands.append( - UpdateBindingConstraint( - id=binding_id, - enabled=col_values.get("enabled", current_binding_dto.enabled), - time_step=col_values.get("type", current_binding_dto.time_step), - operator=col_values.get("operator", current_binding_dto.operator), - coeffs=BindingConstraintManager.terms_to_coeffs(current_binding_dto.terms), - command_context=command_context, - ) - ) - else: - for col, val in columns.__iter__(): - if val is not None: - commands.append( - UpdateConfig( - target=_get_column_path(table_type, col, path_vars), - data=val, - command_context=command_context, - ) - ) - - if commands: - file_study = self.storage_service.get_storage(study).get_raw(study) - execute_or_add_commands(study, file_study, commands, self.storage_service) + data: TableDataDTO, + ) -> TableDataDTO: + if table_type == TableTemplateType.AREA: + return {} + elif table_type == TableTemplateType.LINK: + links_map = {tuple(key.split(" / ")): GetLinkDTO(**values) for key, values in data.items()} + updated_map = self._link_manager.update_links_props(study, links_map) # type: ignore + data = { + f"{area1_id} / {area2_id}": link.dict(by_alias=True) + for (area1_id, area2_id), link in updated_map.items() + } + return data + elif table_type == TableTemplateType.THERMAL_CLUSTER: + return {} + elif table_type == TableTemplateType.RENEWABLE_CLUSTER: + return {} + elif table_type == TableTemplateType.ST_STORAGE: + return {} + elif table_type == TableTemplateType.BINDING_CONSTRAINT: + return {} + else: # pragma: no cover + raise NotImplementedError(f"Table type {table_type} not implemented") diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/links.py b/antarest/study/storage/rawstudy/model/filesystem/config/links.py index 1438cb0009..ddd5d9abb9 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/links.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/links.py @@ -94,6 +94,21 @@ class LinkProperties(IniProperties): 'loop-flow': False, 'transmission-capacities': , 'use-phase-shifter': False} + + >>> pprint(opt.to_config(), width=80) + {'asset-type': 'ac', + 
'colorb': 255, + 'colorg': 192, + 'colorr': 80, + 'display-comments': True, + 'filter-synthesis': 'hourly, daily, weekly, monthly, annual', + 'filter-year-by-year': 'hourly, daily, weekly, monthly, annual', + 'hurdles-cost': False, + 'link-style': 'plain', + 'link-width': 1, + 'loop-flow': False, + 'transmission-capacities': 'infinite', + 'use-phase-shifter': False} """ hurdles_cost: bool = Field(default=False, alias="hurdles-cost") @@ -133,10 +148,10 @@ def to_config(self) -> t.Mapping[str, t.Any]: Convert the object to a dictionary for writing to a configuration file. """ obj = dict(super().to_config()) - color_rgb = obj.pop("color_rgb", (112, 112, 112)) + color_rgb = obj.pop("colorRgb", "#707070") return { - "colorr": color_rgb[0], - "colorg": color_rgb[1], - "colorb": color_rgb[2], + "colorr": int(color_rgb[1:3], 16), + "colorg": int(color_rgb[3:5], 16), + "colorb": int(color_rgb[5:7], 16), **obj, } diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index cb0f81fa2f..0ed6beae02 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -56,6 +56,24 @@ def test_lifecycle__nominal( # Table Mode - Area # ================= + # res = client.put( + # f"/v1/studies/{study_id}/table-mode/areas", + # headers=user_headers, + # json={ + # "de": { + # "averageUnsuppliedEnergyCost": 3456, + # "dispatchableHydroPower": False, + # "filterSynthesis": "daily, monthly", + # "filterYearByYear": "weekly, annual", + # }, + # "es": { + # "adequacyPatchMode": "inside", + # "spreadSpilledEnergyCost": None, # not changed + # }, + # }, + # ) + # assert res.status_code == 200, res.json() + res = client.get(f"/v1/studies/{study_id}/table-mode/areas", headers=user_headers) assert res.status_code == 200, res.json() expected = { @@ -114,58 +132,69 @@ def test_lifecycle__nominal( # Table Mode - Links # ================== - res = client.get( + res = client.put( f"/v1/studies/{study_id}/table-mode/links", headers=user_headers, - params={ - "columns": ",".join( - [ - "hurdlesCost", - "loopFlow", - "usePhaseShifter", - "transmissionCapacities", - "assetType", - "linkStyle", - "linkWidth", - "displayComments", - "filterSynthesis", - "filterYearByYear", - "colorRgb", - ] - ), + json={ + "de / fr": { + "colorRgb": "#FFA500", + "displayComments": False, + "filterSynthesis": "hourly, daily, weekly, annual", + "filterYearByYear": "hourly, daily, monthly, annual", + "hurdlesCost": True, + "linkStyle": "plain", + "linkWidth": 2, + "loopFlow": False, + "transmissionCapacities": "ignore", + }, + "es / fr": { + "colorRgb": "#FF6347", + "displayComments": True, + "filterSynthesis": "hourly, daily, weekly, monthly, annual, annual", # duplicate is ignored + "filterYearByYear": "hourly, daily, weekly, annual", + "hurdlesCost": True, + "linkStyle": "plain", + "linkWidth": 1, + "loopFlow": False, + "transmissionCapacities": "enabled", + "usePhaseShifter": True, + }, + "fr / it": { + "assetType": "DC", # case-insensitive + }, }, ) assert res.status_code == 200, res.json() - expected = { + expected_links = { "de / fr": { "assetType": "ac", - "colorRgb": [112, 112, 112], - "displayComments": True, - "filterSynthesis": "", - "filterYearByYear": "hourly", + "colorRgb": "#FFA500", + "displayComments": False, + "filterSynthesis": "hourly, daily, weekly, annual", + "filterYearByYear": "hourly, daily, monthly, annual", "hurdlesCost": True, "linkStyle": "plain", - "linkWidth": 1, + 
"linkWidth": 2, "loopFlow": False, - "transmissionCapacities": "enabled", + "transmissionCapacities": "ignore", "usePhaseShifter": False, }, "es / fr": { "assetType": "ac", - "colorRgb": [112, 112, 112], + "colorRgb": "#FF6347", "displayComments": True, - "filterSynthesis": "", - "filterYearByYear": "hourly", + "filterSynthesis": "hourly, daily, weekly, monthly, annual", + "filterYearByYear": "hourly, daily, weekly, annual", "hurdlesCost": True, "linkStyle": "plain", "linkWidth": 1, "loopFlow": False, "transmissionCapacities": "enabled", - "usePhaseShifter": False, + "usePhaseShifter": True, }, "fr / it": { - "assetType": "ac", - "colorRgb": [112, 112, 112], + "assetType": "dc", + "colorRgb": "#707070", "displayComments": True, "filterSynthesis": "", "filterYearByYear": "hourly", @@ -178,7 +207,12 @@ def test_lifecycle__nominal( }, } actual = res.json() - assert actual == expected + assert actual == expected_links + + res = client.get(f"/v1/studies/{study_id}/table-mode/links", headers=user_headers) + assert res.status_code == 200, res.json() + actual = res.json() + assert actual == expected_links # Table Mode - Thermal Clusters # ============================= From 9ff7d2de9c89fae238baeda9459bb5d00db1f1ca Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Sun, 31 Mar 2024 19:05:56 +0200 Subject: [PATCH 094/147] feat(tablemode): manage thermals update --- .../business/areas/thermal_management.py | 32 ++++++ .../study/business/table_mode_management.py | 15 ++- .../study_data_blueprint/test_table_mode.py | 106 +++++++++++++++++- 3 files changed, 149 insertions(+), 4 deletions(-) diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index 30fe6c2ce1..651af4b91f 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -223,6 +223,38 @@ def get_all_thermals_props( } return all_clusters + def update_thermals_props( + self, + study: Study, + update_thermals_by_areas: t.Mapping[str, t.Sequence[ThermalClusterOutput]], + ) -> t.Mapping[str, t.Sequence[ThermalClusterOutput]]: + old_thermals_by_areas = self.get_all_thermals_props(study) + new_thermals_by_names: t.MutableMapping[str, t.MutableSequence[ThermalClusterOutput]] = {} + file_study = self.storage_service.get_storage(study).get_raw(study) + commands = [] + for area_id, update_thermals in update_thermals_by_areas.items(): + old_thermals = old_thermals_by_areas.get(area_id, []) + old_thermals_by_id = {cluster.id: cluster for cluster in old_thermals} + update_thermals_by_id = {cluster.id: cluster for cluster in update_thermals} + for cluster_id, update_cluster in update_thermals_by_id.items(): + # Update the thermal cluster properties. + old_cluster = old_thermals_by_id[cluster_id] + new_cluster = old_cluster.copy(update=update_cluster.dict(by_alias=False, exclude_none=True)) + new_thermals_by_names.setdefault(area_id, []).append(new_cluster) + + # Convert the DTO to a configuration object and update the configuration file. 
+ properties = create_thermal_config(study.version, **new_cluster.dict(by_alias=False, exclude_none=True)) + path = _CLUSTER_PATH.format(area_id=area_id, cluster_id=cluster_id) + cmd = UpdateConfig( + target=path, + data=json.loads(properties.json(by_alias=True, exclude={"id"})), + command_context=self.storage_service.variant_study_service.command_factory.command_context, + ) + commands.append(cmd) + + execute_or_add_commands(study, file_study, commands, self.storage_service) + return new_thermals_by_names + def create_cluster(self, study: Study, area_id: str, cluster_data: ThermalClusterCreation) -> ThermalClusterOutput: """ Create a new cluster. diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index f0ad147346..0e632e7084 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -1,3 +1,4 @@ +import collections import typing as t import pandas as pd @@ -6,7 +7,7 @@ from antarest.study.business.area_management import AreaManager from antarest.study.business.areas.renewable_management import RenewableManager, TimeSeriesInterpretation from antarest.study.business.areas.st_storage_management import STStorageManager -from antarest.study.business.areas.thermal_management import ThermalManager +from antarest.study.business.areas.thermal_management import ThermalClusterOutput, ThermalManager from antarest.study.business.binding_constraint_management import BindingConstraintManager from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.link_management import GetLinkDTO, LinkManager @@ -769,7 +770,17 @@ def set_table_data( } return data elif table_type == TableTemplateType.THERMAL_CLUSTER: - return {} + thermals_by_areas = collections.defaultdict(list) + for key, values in data.items(): + area_id, cluster_id = key.split(" / ") + thermals_by_areas[area_id].append(ThermalClusterOutput(**values, id=cluster_id)) + thermals_map = self._thermal_manager.update_thermals_props(study, thermals_by_areas) + data = { + f"{area_id} / {cluster.id}": cluster.dict(by_alias=True) + for area_id, clusters in thermals_map.items() + for cluster in clusters + } + return data elif table_type == TableTemplateType.RENEWABLE_CLUSTER: return {} elif table_type == TableTemplateType.ST_STORAGE: diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index 0ed6beae02..bf45d3407f 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -217,6 +217,108 @@ def test_lifecycle__nominal( # Table Mode - Thermal Clusters # ============================= + res = client.put( + f"/v1/studies/{study_id}/table-mode/thermals", + headers=user_headers, + json={ + "de / 01_solar": { + "group": "Other 2", + "nominalCapacity": 500000, + "so2": 8.25, + "unitCount": 17, + }, + "de / 02_wind_on": { + "group": "Nuclear", + "nominalCapacity": 314159, + "co2": 123, + "unitCount": 15, + }, + }, + ) + assert res.status_code == 200, res.json() + expected_thermals = { + "de / 01_solar": { + "co2": 0.0, + "costGeneration": None, + "efficiency": None, + "enabled": True, + "fixedCost": 0.0, + "genTs": "use global", + "group": "Other 2", + "id": "01_solar", + "lawForced": "uniform", + "lawPlanned": "uniform", + "marginalCost": 10.0, + "marketBidCost": 10.0, + "minDownTime": 1, + "minStablePower": 0.0, + "minUpTime": 1, + "mustRun": False, + 
"name": "01_solar", + "nh3": 0.0, + "nmvoc": 0.0, + "nominalCapacity": 500000.0, + "nox": 0.0, + "op1": 0.0, + "op2": 0.0, + "op3": 0.0, + "op4": 0.0, + "op5": 0.0, + "pm10": 0.0, + "pm25": 0.0, + "pm5": 0.0, + "so2": 8.25, + "spinning": 0.0, + "spreadCost": 0.0, + "startupCost": 0.0, + "unitCount": 17, + "variableOMCost": None, + "volatilityForced": 0.0, + "volatilityPlanned": 0.0, + }, + "de / 02_wind_on": { + "co2": 123.0, + "costGeneration": None, + "efficiency": None, + "enabled": True, + "fixedCost": 0.0, + "genTs": "use global", + "group": "Nuclear", + "id": "02_wind_on", + "lawForced": "uniform", + "lawPlanned": "uniform", + "marginalCost": 20.0, + "marketBidCost": 20.0, + "minDownTime": 1, + "minStablePower": 0.0, + "minUpTime": 1, + "mustRun": False, + "name": "02_wind_on", + "nh3": 0.0, + "nmvoc": 0.0, + "nominalCapacity": 314159.0, + "nox": 0.0, + "op1": 0.0, + "op2": 0.0, + "op3": 0.0, + "op4": 0.0, + "op5": 0.0, + "pm10": 0.0, + "pm25": 0.0, + "pm5": 0.0, + "so2": 0.0, + "spinning": 0.0, + "spreadCost": 0.0, + "startupCost": 0.0, + "unitCount": 15, + "variableOMCost": None, + "volatilityForced": 0.0, + "volatilityPlanned": 0.0, + }, + } + assert res.json()["de / 01_solar"] == expected_thermals["de / 01_solar"] + assert res.json()["de / 02_wind_on"] == expected_thermals["de / 02_wind_on"] + res = client.get( f"/v1/studies/{study_id}/table-mode/thermals", headers=user_headers, @@ -224,8 +326,8 @@ def test_lifecycle__nominal( ) assert res.status_code == 200, res.json() expected = { - "de / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "de / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "de / 01_solar": {"group": "Other 2", "nominalCapacity": 500000.0, "so2": 8.25, "unitCount": 17}, + "de / 02_wind_on": {"group": "Nuclear", "nominalCapacity": 314159.0, "so2": 0.0, "unitCount": 15}, "de / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, "de / 04_res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, "de / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, From d8ddee161885408e340ef2aee534bea6de6d73aa Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Wed, 3 Apr 2024 09:28:44 +0200 Subject: [PATCH 095/147] test(commands): correct unit test related to renewable commands --- .../command/test_create_renewables_cluster.py | 30 +++++++++++-------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/tests/variantstudy/model/command/test_create_renewables_cluster.py b/tests/variantstudy/model/command/test_create_renewables_cluster.py index bb91b82258..51e553bcb8 100644 --- a/tests/variantstudy/model/command/test_create_renewables_cluster.py +++ b/tests/variantstudy/model/command/test_create_renewables_cluster.py @@ -1,5 +1,6 @@ import configparser import re +from unittest import mock import pytest from pydantic import ValidationError @@ -16,7 +17,8 @@ class TestCreateRenewablesCluster: - def test_init(self, command_context: CommandContext): + # noinspection SpellCheckingInspection + def test_init(self, command_context: CommandContext) -> None: cl = CreateRenewablesCluster( area_id="foo", cluster_name="Cluster1", @@ -34,12 +36,13 @@ def test_init(self, command_context: CommandContext): assert cl.cluster_name == "Cluster1" assert cl.parameters == {"group": "Solar Thermal", "nominalcapacity": "2400", "unitcount": "2"} - def test_validate_cluster_name(self, 
command_context: CommandContext): + def test_validate_cluster_name(self, command_context: CommandContext) -> None: with pytest.raises(ValidationError, match="cluster_name"): CreateRenewablesCluster(area_id="fr", cluster_name="%", command_context=command_context, parameters={}) - def test_apply(self, empty_study: FileStudy, command_context: CommandContext): + def test_apply(self, empty_study: FileStudy, command_context: CommandContext) -> None: empty_study.config.enr_modelling = EnrModelling.CLUSTERS.value + empty_study.config.version = 810 study_path = empty_study.config.study_path area_name = "DE" area_id = transform_name_to_id(area_name, lower=True) @@ -107,7 +110,8 @@ def test_apply(self, empty_study: FileStudy, command_context: CommandContext): flags=re.IGNORECASE, ) - def test_to_dto(self, command_context: CommandContext): + # noinspection SpellCheckingInspection + def test_to_dto(self, command_context: CommandContext) -> None: command = CreateRenewablesCluster( area_id="foo", cluster_name="Cluster1", @@ -127,7 +131,7 @@ def test_to_dto(self, command_context: CommandContext): } -def test_match(command_context: CommandContext): +def test_match(command_context: CommandContext) -> None: base = CreateRenewablesCluster( area_id="foo", cluster_name="foo", @@ -159,23 +163,25 @@ def test_match(command_context: CommandContext): assert base.get_inner_matrices() == [] -def test_revert(command_context: CommandContext): +def test_revert(command_context: CommandContext) -> None: base = CreateRenewablesCluster( - area_id="foo", - cluster_name="foo", + area_id="area_foo", + cluster_name="cl1", parameters={}, command_context=command_context, ) - assert CommandReverter().revert(base, [], None) == [ + file_study = mock.MagicMock(spec=FileStudy) + revert_cmd = CommandReverter().revert(base, [], file_study) + assert revert_cmd == [ RemoveRenewablesCluster( - area_id="foo", - cluster_id="foo", + area_id="area_foo", + cluster_id="cl1", command_context=command_context, ) ] -def test_create_diff(command_context: CommandContext): +def test_create_diff(command_context: CommandContext) -> None: base = CreateRenewablesCluster( area_id="foo", cluster_name="foo", From 5c14213023f42991869f9863103b2aa56032d5c4 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Wed, 3 Apr 2024 18:41:47 +0200 Subject: [PATCH 096/147] test: simplify integration tests --- tests/integration/test_integration.py | 494 +------------------------- 1 file changed, 19 insertions(+), 475 deletions(-) diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index 133f7a8f29..8c03873992 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -6,24 +6,14 @@ from starlette.testclient import TestClient -from antarest.core.model import PublicMode from antarest.launcher.model import LauncherLoadDTO -from antarest.study.business.adequacy_patch_management import PriceTakingOrder from antarest.study.business.area_management import LayerInfoDTO -from antarest.study.business.areas.renewable_management import TimeSeriesInterpretation from antarest.study.business.general_management import Mode from antarest.study.business.optimization_management import ( SimplexOptimizationRange, TransmissionCapacities, UnfeasibleProblemBehavior, ) -from antarest.study.business.table_mode_management import AssetType, TableTemplateType, TransmissionCapacity -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( - BindingConstraintFrequency, - 
BindingConstraintOperator, -) -from antarest.study.storage.rawstudy.model.filesystem.config.renewable import RenewableClusterGroup -from antarest.study.storage.rawstudy.model.filesystem.config.thermal import LawOption, LocalTSGenerationBehavior from antarest.study.storage.variantstudy.model.command.common import CommandName from tests.integration.assets import ASSETS_DIR from tests.integration.utils import wait_for @@ -294,7 +284,7 @@ def test_main(client: TestClient, admin_access_token: str, study_id: str) -> Non res = client.get("/v1/launcher/load", headers=admin_headers) assert res.status_code == 200, res.json() - launcher_load = LauncherLoadDTO.parse_obj(res.json()) + launcher_load = LauncherLoadDTO(**res.json()) assert launcher_load.allocated_cpu_rate == 100 / (os.cpu_count() or 1) assert launcher_load.cluster_load_rate == 100 / (os.cpu_count() or 1) assert launcher_load.nb_queued_jobs == 0 @@ -517,8 +507,8 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "args": { "name": "binding constraint 1", "enabled": True, - "time_step": BindingConstraintFrequency.HOURLY.value, - "operator": BindingConstraintOperator.LESS.value, + "time_step": "hourly", + "operator": "less", "coeffs": {"area 1.cluster 1": [2.0, 4]}, }, } @@ -535,8 +525,8 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "args": { "name": "binding constraint 2", "enabled": True, - "time_step": BindingConstraintFrequency.HOURLY.value, - "operator": BindingConstraintOperator.LESS.value, + "time_step": "hourly", + "operator": "less", "coeffs": {}, }, } @@ -793,7 +783,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "ntcBetweenPhysicalAreasOutAdequacyPatch": True, "checkCsrCostFunction": False, "includeHurdleCostCsr": False, - "priceTakingOrder": PriceTakingOrder.DENS.value, + "priceTakingOrder": "DENS", "thresholdInitiateCurtailmentSharingRule": 0.0, "thresholdDisplayLocalMatchingRuleViolations": 0.0, "thresholdCsrVariableBoundsRelaxation": 3, @@ -804,7 +794,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: headers=admin_headers, json={ "ntcBetweenPhysicalAreasOutAdequacyPatch": False, - "priceTakingOrder": PriceTakingOrder.LOAD.value, + "priceTakingOrder": "Load", "thresholdDisplayLocalMatchingRuleViolations": 1.1, }, ) @@ -816,7 +806,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "ntcBetweenPhysicalAreasOutAdequacyPatch": False, "checkCsrCostFunction": False, "includeHurdleCostCsr": False, - "priceTakingOrder": PriceTakingOrder.LOAD.value, + "priceTakingOrder": "Load", "thresholdInitiateCurtailmentSharingRule": 0.0, "thresholdDisplayLocalMatchingRuleViolations": 1.1, "thresholdCsrVariableBoundsRelaxation": 3, @@ -1323,452 +1313,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "ntc": {"stochasticTsStatus": False, "intraModal": False}, } - # --- TableMode START --- - - table_mode_url = f"/v1/studies/{study_id}/tablemode" - - # Table Mode - Area - - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.AREA.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.AREA]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1": { - "nonDispatchablePower": False, - "dispatchableHydroPower": False, - "otherDispatchablePower": False, - "averageUnsuppliedEnergyCost": 2.0, - "spreadUnsuppliedEnergyCost": 0.0, - 
"averageSpilledEnergyCost": 4.0, - "spreadSpilledEnergyCost": 0.0, - "filterSynthesis": "monthly, annual", - "filterYearByYear": "hourly, daily, annual", - "adequacyPatchMode": "inside", - }, - "area 2": { - "nonDispatchablePower": True, - "dispatchableHydroPower": True, - "otherDispatchablePower": True, - "averageUnsuppliedEnergyCost": 0.0, - "spreadUnsuppliedEnergyCost": 0.0, - "averageSpilledEnergyCost": 0.0, - "spreadSpilledEnergyCost": 0.0, - "filterSynthesis": "hourly, daily, weekly, monthly, annual", - "filterYearByYear": "hourly, daily, weekly, monthly, annual", - "adequacyPatchMode": "outside", - }, - } - - client.put( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.AREA.value, - }, - json={ - "area 1": { - "nonDispatchablePower": True, - "spreadSpilledEnergyCost": 1.1, - "filterYearByYear": "monthly, annual", - "adequacyPatchMode": "outside", - }, - "area 2": { - "nonDispatchablePower": False, - "spreadSpilledEnergyCost": 3.0, - "filterSynthesis": "hourly", - "adequacyPatchMode": "inside", - }, - }, - ) - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.AREA.value, - "columns": ",".join(list(FIELDS_INFO_BY_TYPE[TableTemplateType.AREA])), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1": { - "nonDispatchablePower": True, - "dispatchableHydroPower": False, - "otherDispatchablePower": False, - "averageUnsuppliedEnergyCost": 2.0, - "spreadUnsuppliedEnergyCost": 0.0, - "averageSpilledEnergyCost": 4.0, - "spreadSpilledEnergyCost": 1.1, - "filterSynthesis": "monthly, annual", - "filterYearByYear": "monthly, annual", - "adequacyPatchMode": "outside", - }, - "area 2": { - "nonDispatchablePower": False, - "dispatchableHydroPower": True, - "otherDispatchablePower": True, - "averageUnsuppliedEnergyCost": 0.0, - "spreadUnsuppliedEnergyCost": 0.0, - "averageSpilledEnergyCost": 0.0, - "spreadSpilledEnergyCost": 3.0, - "filterSynthesis": "hourly", - "filterYearByYear": "hourly, daily, weekly, monthly, annual", - "adequacyPatchMode": "inside", - }, - } - - # Table Mode - Link - - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.LINK.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.LINK]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1 / area 2": { - "hurdlesCost": False, - "loopFlow": False, - "usePhaseShifter": False, - "transmissionCapacities": "enabled", - "assetType": "ac", - "linkStyle": "plain", - "linkWidth": True, - "displayComments": True, - "filterSynthesis": "hourly, daily, weekly, monthly, annual", - "filterYearByYear": "hourly, daily, weekly, monthly, annual", - } - } - - client.put( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.LINK.value, - }, - json={ - "area 1 / area 2": { - "hurdlesCost": True, - "transmissionCapacities": TransmissionCapacity.IGNORE.value, - "assetType": AssetType.GAZ.value, - "filterSynthesis": "daily,annual", - } - }, - ) - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.LINK.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.LINK]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1 / area 2": { - "hurdlesCost": True, - "loopFlow": False, - "usePhaseShifter": False, - "transmissionCapacities": 
"ignore", - "assetType": "gaz", - "linkStyle": "plain", - "linkWidth": True, - "displayComments": True, - "filterSynthesis": "daily,annual", - "filterYearByYear": "hourly, daily, weekly, monthly, annual", - } - } - - # Table Mode - Cluster - - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.THERMAL_CLUSTER, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.THERMAL_CLUSTER]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1 / cluster 1": { - "group": "", - "enabled": True, - "mustRun": False, - "unitCount": 0, - "nominalCapacity": 0, - "minStablePower": 0, - "spinning": 0, - "minUpTime": 1, - "minDownTime": 1, - "co2": 0, - "marginalCost": 0, - "fixedCost": 0, - "startupCost": 0, - "marketBidCost": 0, - "spreadCost": 0, - "tsGen": "use global", - "volatilityForced": 0, - "volatilityPlanned": 0, - "lawForced": "uniform", - "lawPlanned": "uniform", - }, - "area 2 / cluster 2": { - "group": "", - "enabled": True, - "mustRun": False, - "unitCount": 0, - "nominalCapacity": 0, - "minStablePower": 0, - "spinning": 0, - "minUpTime": 1, - "minDownTime": 1, - "co2": 0, - "marginalCost": 0, - "fixedCost": 0, - "startupCost": 0, - "marketBidCost": 0, - "spreadCost": 0, - "tsGen": "use global", - "volatilityForced": 0, - "volatilityPlanned": 0, - "lawForced": "uniform", - "lawPlanned": "uniform", - }, - } - - client.put( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.THERMAL_CLUSTER, - }, - json={ - "area 1 / cluster 1": { - "enabled": False, - "unitCount": 3, - "spinning": 8, - "tsGen": LocalTSGenerationBehavior.FORCE_GENERATION.value, - "lawPlanned": LawOption.GEOMETRIC.value, - }, - "area 2 / cluster 2": { - "nominalCapacity": 2, - }, - }, - ) - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.THERMAL_CLUSTER, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.THERMAL_CLUSTER]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1 / cluster 1": { - "group": "", - "enabled": False, - "mustRun": False, - "unitCount": 3, - "nominalCapacity": 0, - "minStablePower": 0, - "spinning": 8, - "minUpTime": 1, - "minDownTime": 1, - "co2": 0, - "marginalCost": 0, - "fixedCost": 0, - "startupCost": 0, - "marketBidCost": 0, - "spreadCost": 0, - "tsGen": "force generation", - "volatilityForced": 0, - "volatilityPlanned": 0, - "lawForced": "uniform", - "lawPlanned": "geometric", - }, - "area 2 / cluster 2": { - "group": "", - "enabled": True, - "mustRun": False, - "unitCount": 0, - "nominalCapacity": 2, - "minStablePower": 0, - "spinning": 0, - "minUpTime": 1, - "minDownTime": 1, - "co2": 0, - "marginalCost": 0, - "fixedCost": 0, - "startupCost": 0, - "marketBidCost": 0, - "spreadCost": 0, - "tsGen": "use global", - "volatilityForced": 0, - "volatilityPlanned": 0, - "lawForced": "uniform", - "lawPlanned": "uniform", - }, - } - - # Table Mode - Renewable - - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.RENEWABLE_CLUSTER, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.RENEWABLE_CLUSTER]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1 / cluster renewable 1": { - "group": "", - "tsInterpretation": TimeSeriesInterpretation.POWER_GENERATION.value, - "enabled": True, - "unitCount": 0, - 
"nominalCapacity": 0, - }, - "area 2 / cluster renewable 2": { - "group": "", - "tsInterpretation": TimeSeriesInterpretation.POWER_GENERATION.value, - "enabled": True, - "unitCount": 0, - "nominalCapacity": 0, - }, - } - - client.put( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.RENEWABLE_CLUSTER, - }, - json={ - "area 1 / cluster renewable 1": { - "tsInterpretation": TimeSeriesInterpretation.PRODUCTION_FACTOR.value, - "enabled": False, - }, - "area 2 / cluster renewable 2": { - "unitCount": 2, - "nominalCapacity": 13, - }, - }, - ) - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.RENEWABLE_CLUSTER, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.RENEWABLE_CLUSTER]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1 / cluster renewable 1": { - "group": "", - "tsInterpretation": TimeSeriesInterpretation.PRODUCTION_FACTOR.value, - "enabled": False, - "unitCount": 0, - "nominalCapacity": 0, - }, - "area 2 / cluster renewable 2": { - "group": "", - "tsInterpretation": TimeSeriesInterpretation.POWER_GENERATION.value, - "enabled": True, - "unitCount": 2, - "nominalCapacity": 13, - }, - } - - # Table Mode - Binding Constraint - - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.BINDING_CONSTRAINT.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.BINDING_CONSTRAINT]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "binding constraint 1": { - "enabled": True, - "type": BindingConstraintFrequency.HOURLY.value, - "operator": BindingConstraintOperator.LESS.value, - "group": "default", - }, - "binding constraint 2": { - "enabled": True, - "type": BindingConstraintFrequency.HOURLY.value, - "operator": BindingConstraintOperator.LESS.value, - "group": "default", - }, - } - - client.put( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.BINDING_CONSTRAINT.value, - }, - json={ - "binding constraint 1": { - "enabled": False, - "operator": BindingConstraintOperator.BOTH.value, - }, - "binding constraint 2": { - "type": BindingConstraintFrequency.WEEKLY.value, - "operator": BindingConstraintOperator.EQUAL.value, - }, - }, - ) - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.BINDING_CONSTRAINT.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.BINDING_CONSTRAINT]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "binding constraint 1": { - "enabled": False, - "type": BindingConstraintFrequency.HOURLY.value, - "operator": BindingConstraintOperator.BOTH.value, - "group": "default", - }, - "binding constraint 2": { - "enabled": True, - "type": BindingConstraintFrequency.WEEKLY.value, - "operator": BindingConstraintOperator.EQUAL.value, - "group": "default", - }, - } - - res = client.get(f"/v1/studies/{study_id}/bindingconstraints/binding constraint 1", headers=admin_headers) - binding_constraint_1 = res.json() - assert res.status_code == 200, res.json() - - term = binding_constraint_1["terms"][0] - assert term["id"] == "area 1.cluster 1" - assert term["weight"] == 2.0 - assert term["offset"] == 4 - - # --- TableMode END --- - # Renewable form res = client.put( @@ -1776,7 +1320,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> 
None: headers=admin_headers, json={ "name": "cluster renewable 1 renamed", - "tsInterpretation": TimeSeriesInterpretation.PRODUCTION_FACTOR, + "tsInterpretation": "production-factor", "unitCount": 9, "enabled": False, "nominalCapacity": 3, @@ -1790,11 +1334,11 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: ) expected = { "enabled": False, - "group": RenewableClusterGroup.OTHER1, # Default group used when not specified. + "group": "Other RES 1", # Default group used when not specified. "id": "cluster renewable 1", "name": "cluster renewable 1 renamed", "nominalCapacity": 3.0, - "tsInterpretation": TimeSeriesInterpretation.PRODUCTION_FACTOR, + "tsInterpretation": "production-factor", "unitCount": 9, } assert res.status_code == 200, res.json() @@ -1882,11 +1426,11 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "color_r": 255, "color_g": 0, "color_b": 100, - "layers": 0, + "layers": "0", }, "layerX": {"0": 100}, "layerY": {"0": 100}, - "layerColor": {"0": "255 , 0 , 100"}, + "layerColor": {"0": "255, 0, 100"}, }, "area 2": { "ui": { @@ -1899,7 +1443,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: }, "layerX": {"0": 0, "1": 105}, "layerY": {"0": 0, "1": 105}, - "layerColor": {"0": "230 , 108 , 44", "1": "255 , 10 , 100"}, + "layerColor": {"0": "230, 108, 44", "1": "255, 10, 100"}, }, } @@ -1924,7 +1468,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "min-stable-power": None, "min-up-time": None, "name": "cluster 2", - "nominalcapacity": 2, + "nominalcapacity": 0, "spinning": None, "spread-cost": None, "type": None, @@ -2050,7 +1594,7 @@ def test_import(client: TestClient, admin_access_token: str, study_id: str) -> N ).json() res = client.get(f"v1/studies/{uuid}", headers=admin_headers).json() assert res["groups"] == [{"id": "admin", "name": "admin"}] - assert res["public_mode"] == PublicMode.NONE + assert res["public_mode"] == "NONE" # Create user George who belongs to no group client.post( @@ -2070,7 +1614,7 @@ def test_import(client: TestClient, admin_access_token: str, study_id: str) -> N ).json() res = client.get(f"v1/studies/{uuid}", headers=georges_headers).json() assert res["groups"] == [] - assert res["public_mode"] == PublicMode.READ + assert res["public_mode"] == "READ" # Study importer works for 7z files res = client.post( @@ -2138,7 +1682,7 @@ def test_copy(client: TestClient, admin_access_token: str, study_id: str) -> Non # asserts that it has admin groups and PublicMode to NONE res = client.get(f"/v1/studies/{copied.json()}", headers=admin_headers).json() assert res["groups"] == [{"id": "admin", "name": "admin"}] - assert res["public_mode"] == PublicMode.NONE + assert res["public_mode"] == "NONE" # Connect with user George who belongs to no group res = client.post("/v1/login", json={"username": "George", "password": "mypass"}) @@ -2153,4 +1697,4 @@ def test_copy(client: TestClient, admin_access_token: str, study_id: str) -> Non # asserts that it has no groups and PublicMode to READ res = client.get(f"/v1/studies/{copied.json()}", headers=admin_headers).json() assert res["groups"] == [] - assert res["public_mode"] == PublicMode.READ + assert res["public_mode"] == "READ" From 2b63395afcd9860b986eeb920a19807386626b5a Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Wed, 3 Apr 2024 18:46:59 +0200 Subject: [PATCH 097/147] feat(tablemode): implement table mode update --- antarest/study/business/area_management.py | 144 +++- 
.../business/areas/renewable_management.py | 68 +- .../business/areas/st_storage_management.py | 57 +- .../business/areas/thermal_management.py | 51 +- .../business/binding_constraint_management.py | 32 +- antarest/study/business/link_management.py | 15 +- .../study/business/table_mode_management.py | 768 +++--------------- .../rawstudy/model/filesystem/config/area.py | 28 +- .../model/filesystem/config/cluster.py | 14 +- .../model/filesystem/config/ini_properties.py | 8 +- .../rawstudy/model/filesystem/config/links.py | 1 + .../model/filesystem/config/renewable.py | 31 +- .../model/filesystem/config/st_storage.py | 33 +- .../model/filesystem/config/thermal.py | 74 +- .../command/create_binding_constraint.py | 19 + antarest/study/web/study_data_blueprint.py | 17 +- .../test_binding_constraints.py | 2 +- .../study_data_blueprint/test_table_mode.py | 486 ++++++++--- .../storage/business/test_arealink_manager.py | 2 +- .../areas/test_st_storage_management.py | 6 +- 20 files changed, 945 insertions(+), 911 deletions(-) diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py index 9c4967ca66..d7909acb75 100644 --- a/antarest/study/business/area_management.py +++ b/antarest/study/business/area_management.py @@ -6,6 +6,7 @@ from pydantic import BaseModel, Extra, Field from antarest.core.exceptions import ConfigFileNotFound, DuplicateAreaName, LayerNotAllowedToBeDeleted, LayerNotFound +from antarest.core.model import JSON from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands from antarest.study.model import Patch, PatchArea, PatchCluster, RawStudy, Study from antarest.study.repository import StudyMetadataRepository @@ -16,6 +17,7 @@ AreaUI, OptimizationProperties, ThermalAreasProperties, + UIProperties, ) from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, DistrictSet, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy @@ -40,6 +42,7 @@ class AreaCreationDTO(BaseModel): set: t.Optional[t.List[str]] +# review: is this class necessary? class ClusterInfoDTO(PatchCluster): id: str name: str @@ -85,7 +88,9 @@ def _get_ui_info_map(file_study: FileStudy, area_ids: t.Sequence[str]) -> t.Dict # instead of raising an obscure exception. if not area_ids: return {} + ui_info_map = file_study.tree.get(["input", "areas", ",".join(area_ids), "ui"]) + # If there is only one ID in the `area_ids`, the result returned from # the `file_study.tree.get` call will be a single UI object. # On the other hand, if there are multiple values in `area_ids`, @@ -93,6 +98,10 @@ def _get_ui_info_map(file_study: FileStudy, area_ids: t.Sequence[str]) -> t.Dict # and the values are the corresponding UI objects. if len(area_ids) == 1: ui_info_map = {area_ids[0]: ui_info_map} + + # Convert to UIProperties to ensure that the UI object is valid. + ui_info_map = {area_id: UIProperties(**ui_info).to_config() for area_id, ui_info in ui_info_map.items()} + return ui_info_map @@ -133,19 +142,19 @@ class _BaseAreaDTO( # noinspection SpellCheckingInspection @camel_case_model -class GetAreaDTO(_BaseAreaDTO, metaclass=AllOptionalMetaclass): +class AreaOutput(_BaseAreaDTO, metaclass=AllOptionalMetaclass, use_none=True): """ DTO object use to get the area information using a flat structure. 
""" @classmethod - def create_area_dto( + def from_model( cls, area_folder: AreaFolder, *, average_unsupplied_energy_cost: float, average_spilled_energy_cost: float, - ) -> "GetAreaDTO": + ) -> "AreaOutput": """ Creates a `GetAreaDTO` object from configuration data. @@ -166,6 +175,30 @@ def create_area_dto( } return cls(**obj) + def _to_optimization(self) -> OptimizationProperties: + obj = {name: getattr(self, name) for name in OptimizationProperties.FilteringSection.__fields__} + filtering_section = OptimizationProperties.FilteringSection(**obj) + obj = {name: getattr(self, name) for name in OptimizationProperties.ModalOptimizationSection.__fields__} + nodal_optimization_section = OptimizationProperties.ModalOptimizationSection(**obj) + return OptimizationProperties( + filtering=filtering_section, + nodal_optimization=nodal_optimization_section, + ) + + def _to_adequacy_patch(self) -> AdequacyPathProperties: + obj = {name: getattr(self, name) for name in AdequacyPathProperties.AdequacyPathSection.__fields__} + adequacy_path_section = AdequacyPathProperties.AdequacyPathSection(**obj) + return AdequacyPathProperties(adequacy_patch=adequacy_path_section) + + @property + def area_folder(self) -> AreaFolder: + area_folder = AreaFolder( + optimization=self._to_optimization(), + adequacy_patch=self._to_adequacy_patch(), + # ui properties are not included in the AreaFolder. + ) + return area_folder + class AreaManager: """ @@ -194,7 +227,7 @@ def __init__( self.patch_service = PatchService(repository=repository) # noinspection SpellCheckingInspection - def get_all_area_props(self, study: RawStudy) -> t.Mapping[str, GetAreaDTO]: + def get_all_area_props(self, study: RawStudy) -> t.Mapping[str, AreaOutput]: """ Retrieves all areas of a study. @@ -232,7 +265,7 @@ def get_all_area_props(self, study: RawStudy) -> t.Mapping[str, GetAreaDTO]: area_map = {} for area_id, area_cfg in areas_cfg.items(): area_folder = AreaFolder(**area_cfg) - area_map[area_id] = GetAreaDTO.create_area_dto( + area_map[area_id] = AreaOutput.from_model( area_folder, average_unsupplied_energy_cost=thermal_areas.unserverd_energy_cost.get(area_id, 0.0), average_spilled_energy_cost=thermal_areas.spilled_energy_cost.get(area_id, 0.0), @@ -240,6 +273,79 @@ def get_all_area_props(self, study: RawStudy) -> t.Mapping[str, GetAreaDTO]: return area_map + # noinspection SpellCheckingInspection + def update_areas_props( + self, study: RawStudy, update_areas_by_ids: t.Mapping[str, AreaOutput] + ) -> t.Mapping[str, AreaOutput]: + """ + Update the properties of ares. + + Args: + study: The raw study object. + update_areas_by_ids: A mapping of area IDs to area properties. + + Returns: + A mapping of ALL area IDs to area properties. + """ + old_areas_by_ids = self.get_all_area_props(study) + new_areas_by_ids = {k: v for k, v in old_areas_by_ids.items()} + + # Prepare the commands to update the thermal clusters. + commands = [] + command_context = self.storage_service.variant_study_service.command_factory.command_context + + for area_id, update_area in update_areas_by_ids.items(): + # Update the area properties. + old_area = old_areas_by_ids[area_id] + new_area = old_area.copy(update=update_area.dict(by_alias=False, exclude_none=True)) + new_areas_by_ids[area_id] = new_area + + # Convert the DTO to a configuration object and update the configuration file. 
+ old_area_folder = old_area.area_folder + new_area_folder = new_area.area_folder + + if old_area_folder.optimization != new_area_folder.optimization: + commands.append( + UpdateConfig( + target=f"input/areas/{area_id}/optimization", + data=new_area_folder.optimization.to_config(), + command_context=command_context, + ) + ) + if old_area_folder.adequacy_patch != new_area_folder.adequacy_patch: + commands.append( + UpdateConfig( + target=f"input/areas/{area_id}/adequacy_patch", + data=new_area_folder.adequacy_patch.to_config(), + command_context=command_context, + ) + ) + if old_area.average_unsupplied_energy_cost != new_area.average_unsupplied_energy_cost: + commands.append( + UpdateConfig( + target=f"input/thermal/areas/unserverdenergycost/{area_id}", + data=new_area.average_unsupplied_energy_cost, + command_context=command_context, + ) + ) + if old_area.average_spilled_energy_cost != new_area.average_spilled_energy_cost: + commands.append( + UpdateConfig( + target=f"input/thermal/areas/spilledenergycost:{area_id}", + data=new_area.average_spilled_energy_cost, + command_context=command_context, + ) + ) + + file_study = self.storage_service.get_storage(study).get_raw(study) + execute_or_add_commands(study, file_study, commands, self.storage_service) + + return new_areas_by_ids + + @staticmethod + def get_table_schema() -> JSON: + return AreaOutput.schema() + def get_all_areas(self, study: RawStudy, area_type: t.Optional[AreaType] = None) -> t.List[AreaInfoDTO]: """ Retrieves all areas and districts of a raw study based on the area type. @@ -496,32 +602,33 @@ def update_area_metadata( ) def update_area_ui(self, study: Study, area_id: str, area_ui: AreaUI, layer: str = "0") -> None: + obj = area_ui.to_config() file_study = self.storage_service.get_storage(study).get_raw(study) commands = ( [ UpdateConfig( target=f"input/areas/{area_id}/ui/ui/x", - data=area_ui.x, + data=obj["x"], command_context=self.storage_service.variant_study_service.command_factory.command_context, ), UpdateConfig( target=f"input/areas/{area_id}/ui/ui/y", - data=area_ui.y, + data=obj["y"], command_context=self.storage_service.variant_study_service.command_factory.command_context, ), UpdateConfig( target=f"input/areas/{area_id}/ui/ui/color_r", - data=area_ui.color_rgb[0], + data=obj["color_r"], command_context=self.storage_service.variant_study_service.command_factory.command_context, ), UpdateConfig( target=f"input/areas/{area_id}/ui/ui/color_g", - data=area_ui.color_rgb[1], + data=obj["color_g"], command_context=self.storage_service.variant_study_service.command_factory.command_context, ), UpdateConfig( target=f"input/areas/{area_id}/ui/ui/color_b", - data=area_ui.color_rgb[2], + data=obj["color_b"], command_context=self.storage_service.variant_study_service.command_factory.command_context, ), ] @@ -532,17 +639,17 @@ def update_area_ui(self, study: Study, area_id: str, area_ui: AreaUI, layer: str [ UpdateConfig( target=f"input/areas/{area_id}/ui/layerX/{layer}", - data=area_ui.x, + data=obj["x"], command_context=self.storage_service.variant_study_service.command_factory.command_context, ), UpdateConfig( target=f"input/areas/{area_id}/ui/layerY/{layer}", - data=area_ui.y, + data=obj["y"], command_context=self.storage_service.variant_study_service.command_factory.command_context, ), UpdateConfig( target=f"input/areas/{area_id}/ui/layerColor/{layer}", - data=f"{str(area_ui.color_rgb[0])} , {str(area_ui.color_rgb[1])} , {str(area_ui.color_rgb[2])}", + data=f"{obj['color_r']},{obj['color_g']},{obj['color_b']}", 
command_context=self.storage_service.variant_study_service.command_factory.command_context, ), ] @@ -593,11 +700,8 @@ def _update_with_cluster_metadata( def _get_clusters(file_study: FileStudy, area: str, metadata_patch: Patch) -> t.List[ClusterInfoDTO]: thermal_clusters_data = file_study.tree.get(["input", "thermal", "clusters", area, "list"]) cluster_patch = metadata_patch.thermal_clusters or {} - return [ - AreaManager._update_with_cluster_metadata( - area, - ClusterInfoDTO.parse_obj({**thermal_clusters_data[tid], "id": tid}), - cluster_patch, - ) - for tid in thermal_clusters_data + result = [ + AreaManager._update_with_cluster_metadata(area, ClusterInfoDTO(id=tid, **obj), cluster_patch) + for tid, obj in thermal_clusters_data.items() ] + return result diff --git a/antarest/study/business/areas/renewable_management.py b/antarest/study/business/areas/renewable_management.py index c721f0bb89..1009c9d22c 100644 --- a/antarest/study/business/areas/renewable_management.py +++ b/antarest/study/business/areas/renewable_management.py @@ -1,9 +1,11 @@ +import collections import json import typing as t from pydantic import validator from antarest.core.exceptions import DuplicateRenewableCluster, RenewableClusterConfigNotFound, RenewableClusterNotFound +from antarest.core.model import JSON from antarest.study.business.all_optional_meta import AllOptionalMetaclass, camel_case_model from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import execute_or_add_commands @@ -22,14 +24,6 @@ from antarest.study.storage.variantstudy.model.command.replace_matrix import ReplaceMatrix from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig -__all__ = ( - "RenewableClusterInput", - "RenewableClusterCreation", - "RenewableClusterOutput", - "RenewableManager", - "TimeSeriesInterpretation", -) - _CLUSTER_PATH = "input/renewables/clusters/{area_id}/list/{cluster_id}" _CLUSTERS_PATH = "input/renewables/clusters/{area_id}/list" _ALL_CLUSTERS_PATH = "input/renewables/clusters" @@ -149,7 +143,7 @@ def get_clusters(self, study: Study, area_id: str) -> t.Sequence[RenewableCluste def get_all_renewables_props( self, study: Study, - ) -> t.Mapping[str, t.Sequence[RenewableClusterOutput]]: + ) -> t.Mapping[str, t.Mapping[str, RenewableClusterOutput]]: """ Retrieve all renewable clusters from all areas within a study. @@ -157,7 +151,7 @@ def get_all_renewables_props( study: Study from which to retrieve the clusters. Returns: - A mapping of area IDs to lists of renewable clusters within the specified area. + A mapping of area IDs to a mapping of cluster IDs to cluster output. Raises: RenewableClusterConfigNotFound: If no clusters are found in the specified area. 
@@ -174,14 +168,13 @@ def get_all_renewables_props( raise RenewableClusterConfigNotFound(path) study_version = study.version - all_clusters = { - area_id: [ - create_renewable_output(study_version, cluster_id, cluster) - for cluster_id, cluster in cluster_obj.items() - ] - for area_id, cluster_obj in clusters.items() - } - return all_clusters + renewables_by_areas: t.MutableMapping[str, t.MutableMapping[str, RenewableClusterOutput]] + renewables_by_areas = collections.defaultdict(dict) + for area_id, cluster_obj in clusters.items(): + for cluster_id, cluster in cluster_obj.items(): + renewables_by_areas[area_id][cluster_id] = create_renewable_output(study_version, cluster_id, cluster) + + return renewables_by_areas def create_cluster( self, study: Study, area_id: str, cluster_data: RenewableClusterCreation @@ -366,3 +359,42 @@ def duplicate_cluster( execute_or_add_commands(study, self._get_file_study(study), commands, self.storage_service) return RenewableClusterOutput(**new_config.dict(by_alias=False)) + + def update_renewables_props( + self, + study: Study, + update_renewables_by_areas: t.Mapping[str, t.Mapping[str, RenewableClusterInput]], + ) -> t.Mapping[str, t.Mapping[str, RenewableClusterOutput]]: + old_renewables_by_areas = self.get_all_renewables_props(study) + new_renewables_by_areas = {area_id: dict(clusters) for area_id, clusters in old_renewables_by_areas.items()} + + # Prepare the commands to update the renewable clusters. + commands = [] + for area_id, update_renewables_by_ids in update_renewables_by_areas.items(): + old_renewables_by_ids = old_renewables_by_areas[area_id] + for renewable_id, update_cluster in update_renewables_by_ids.items(): + # Update the renewable cluster properties. + old_cluster = old_renewables_by_ids[renewable_id] + new_cluster = old_cluster.copy(update=update_cluster.dict(by_alias=False, exclude_none=True)) + new_renewables_by_areas[area_id][renewable_id] = new_cluster + + # Convert the DTO to a configuration object and update the configuration file. 
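A few hunks above, `get_all_renewables_props` now groups clusters per area with `collections.defaultdict(dict)`, so the inner mapping is created lazily on first access. A small self-contained illustration of the same grouping, using plain floats instead of the real cluster DTOs:

import collections
import typing as t

# (area_id, cluster_id, nominal_capacity) rows, as they might come out of the INI tree.
rows = [("fr", "gas_1", 850.0), ("fr", "gas_2", 400.0), ("de", "wind_on", 120.0)]

clusters_by_areas: t.MutableMapping[str, t.MutableMapping[str, float]]
clusters_by_areas = collections.defaultdict(dict)
for area_id, cluster_id, capacity in rows:
    # The inner dict is created on first access, so no setdefault() call is needed.
    clusters_by_areas[area_id][cluster_id] = capacity

assert clusters_by_areas["fr"] == {"gas_1": 850.0, "gas_2": 400.0}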
+ properties = create_renewable_config( + study.version, **new_cluster.dict(by_alias=False, exclude_none=True) + ) + path = _CLUSTER_PATH.format(area_id=area_id, cluster_id=renewable_id) + cmd = UpdateConfig( + target=path, + data=json.loads(properties.json(by_alias=True, exclude={"id"})), + command_context=self.storage_service.variant_study_service.command_factory.command_context, + ) + commands.append(cmd) + + file_study = self.storage_service.get_storage(study).get_raw(study) + execute_or_add_commands(study, file_study, commands, self.storage_service) + + return new_renewables_by_areas + + @staticmethod + def get_table_schema() -> JSON: + return RenewableClusterOutput.schema() diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py index a9b52a0cf3..05ade38c1c 100644 --- a/antarest/study/business/areas/st_storage_management.py +++ b/antarest/study/business/areas/st_storage_management.py @@ -1,3 +1,4 @@ +import collections import functools import json import operator @@ -15,6 +16,7 @@ STStorageMatrixNotFound, STStorageNotFound, ) +from antarest.core.model import JSON from antarest.study.business.all_optional_meta import AllOptionalMetaclass, camel_case_model from antarest.study.business.utils import execute_or_add_commands from antarest.study.model import Study @@ -331,7 +333,7 @@ def get_storages( def get_all_storages_props( self, study: Study, - ) -> t.Mapping[str, t.Sequence[STStorageOutput]]: + ) -> t.Mapping[str, t.Mapping[str, STStorageOutput]]: """ Retrieve all short-term storages from all areas within a study. @@ -339,7 +341,7 @@ def get_all_storages_props( study: Study from which to retrieve the storages. Returns: - A mapping of area IDs to lists of short-term storages within the specified area. + A mapping of area IDs to a mapping of storage IDs to storage configurations. Raises: STStorageConfigNotFound: If no storages are found in the specified area. @@ -355,11 +357,48 @@ def get_all_storages_props( except KeyError: raise STStorageConfigNotFound(path) from None - all_storages = { - area_id: [STStorageOutput.from_config(cluster_id, cluster) for cluster_id, cluster in cluster_obj.items()] - for area_id, cluster_obj in storages.items() - } - return all_storages + storages_by_areas: t.MutableMapping[str, t.MutableMapping[str, STStorageOutput]] + storages_by_areas = collections.defaultdict(dict) + for area_id, cluster_obj in storages.items(): + for cluster_id, cluster in cluster_obj.items(): + storages_by_areas[area_id][cluster_id] = STStorageOutput.from_config(cluster_id, cluster) + + return storages_by_areas + + def update_storages_props( + self, + study: Study, + update_storages_by_areas: t.Mapping[str, t.Mapping[str, STStorageInput]], + ) -> t.Mapping[str, t.Mapping[str, STStorageOutput]]: + old_storages_by_areas = self.get_all_storages_props(study) + new_storages_by_areas = {area_id: dict(clusters) for area_id, clusters in old_storages_by_areas.items()} + + # Prepare the commands to update the storage clusters. + commands = [] + for area_id, update_storages_by_ids in update_storages_by_areas.items(): + old_storages_by_ids = old_storages_by_areas[area_id] + for storage_id, update_cluster in update_storages_by_ids.items(): + # Update the storage cluster properties. 
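The `json.loads(properties.json(by_alias=True, exclude={"id"}))` round-trip used to build the `UpdateConfig` payload turns a pydantic model into plain JSON-compatible values: enums collapse to their string values, aliases are applied, and the technical `id` field is dropped. A short sketch of the idea with a made-up model (pydantic v1 API; `ClusterProps` is hypothetical):

import enum
import json

from pydantic import BaseModel, Field


class Group(enum.Enum):
    GAS = "gas"
    NUCLEAR = "nuclear"


class ClusterProps(BaseModel):
    id: str
    group: Group = Group.GAS
    unit_count: int = Field(1, alias="unitcount")

    class Config:
        allow_population_by_field_name = True


props = ClusterProps(id="gas_1", group=Group.NUCLEAR, unit_count=4)

# .json() serializes enums and applies the INI aliases; json.loads() turns the
# result back into primitives that UpdateConfig can write to the study tree.
data = json.loads(props.json(by_alias=True, exclude={"id"}))
assert data == {"group": "nuclear", "unitcount": 4}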
+ old_cluster = old_storages_by_ids[storage_id] + new_cluster = old_cluster.copy(update=update_cluster.dict(by_alias=False, exclude_none=True)) + new_storages_by_areas[area_id][storage_id] = new_cluster + + # Convert the DTO to a configuration object and update the configuration file. + properties = create_st_storage_config( + study.version, **new_cluster.dict(by_alias=False, exclude_none=True) + ) + path = _STORAGE_LIST_PATH.format(area_id=area_id, storage_id=storage_id) + cmd = UpdateConfig( + target=path, + data=json.loads(properties.json(by_alias=True, exclude={"id"})), + command_context=self.storage_service.variant_study_service.command_factory.command_context, + ) + commands.append(cmd) + + file_study = self.storage_service.get_storage(study).get_raw(study) + execute_or_add_commands(study, file_study, commands, self.storage_service) + + return new_storages_by_areas def get_storage( self, @@ -647,3 +686,7 @@ def validate_matrices( # Validation successful return True + + @staticmethod + def get_table_schema() -> JSON: + return STStorageOutput.schema() diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index 651af4b91f..205965eb54 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -1,3 +1,4 @@ +import collections import json import typing as t from pathlib import Path @@ -11,6 +12,7 @@ ThermalClusterNotFound, WrongMatrixHeightError, ) +from antarest.core.model import JSON from antarest.study.business.all_optional_meta import AllOptionalMetaclass, camel_case_model from antarest.study.business.utils import execute_or_add_commands from antarest.study.model import Study @@ -190,7 +192,7 @@ def get_clusters( def get_all_thermals_props( self, study: Study, - ) -> t.Mapping[str, t.Sequence[ThermalClusterOutput]]: + ) -> t.Mapping[str, t.Mapping[str, ThermalClusterOutput]]: """ Retrieve all thermal clusters from all areas within a study. @@ -198,7 +200,7 @@ def get_all_thermals_props( study: Study from which to retrieve the clusters. Returns: - A mapping of area IDs to lists of thermal clusters within the specified area. + A mapping of area IDs to a mapping of cluster IDs to thermal cluster configurations. Raises: ThermalClusterConfigNotFound: If no clusters are found in the specified area. 
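The `get_table_schema` helpers added to these managers simply expose the pydantic JSON Schema of the output DTO, which is what the table mode can use to derive column names, titles and types. A self-contained sketch of what `.schema()` returns (pydantic v1 API; `StorageProps` is a hypothetical stand-in for `STStorageOutput`):

from pydantic import BaseModel, Field


class StorageProps(BaseModel):
    # Hypothetical stand-in for STStorageOutput.
    efficiency: float = Field(1.0, ge=0, le=1, title="Efficiency")
    reservoir_capacity: float = Field(0.0, ge=0, title="Reservoir Capacity")


schema = StorageProps.schema()
# The schema lists every column with its title, type and constraints.
assert schema["properties"]["efficiency"]["title"] == "Efficiency"
assert schema["properties"]["reservoir_capacity"]["type"] == "number"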
@@ -215,36 +217,35 @@ def get_all_thermals_props( raise ThermalClusterConfigNotFound(path) from None study_version = study.version - all_clusters = { - area_id: [ - create_thermal_output(study_version, cluster_id, cluster) for cluster_id, cluster in cluster_obj.items() - ] - for area_id, cluster_obj in clusters.items() - } - return all_clusters + thermals_by_areas: t.MutableMapping[str, t.MutableMapping[str, ThermalClusterOutput]] + thermals_by_areas = collections.defaultdict(dict) + for area_id, cluster_obj in clusters.items(): + for cluster_id, cluster in cluster_obj.items(): + thermals_by_areas[area_id][cluster_id] = create_thermal_output(study_version, cluster_id, cluster) + + return thermals_by_areas def update_thermals_props( self, study: Study, - update_thermals_by_areas: t.Mapping[str, t.Sequence[ThermalClusterOutput]], - ) -> t.Mapping[str, t.Sequence[ThermalClusterOutput]]: + update_thermals_by_areas: t.Mapping[str, t.Mapping[str, ThermalClusterInput]], + ) -> t.Mapping[str, t.Mapping[str, ThermalClusterOutput]]: old_thermals_by_areas = self.get_all_thermals_props(study) - new_thermals_by_names: t.MutableMapping[str, t.MutableSequence[ThermalClusterOutput]] = {} - file_study = self.storage_service.get_storage(study).get_raw(study) + new_thermals_by_areas = {area_id: dict(clusters) for area_id, clusters in old_thermals_by_areas.items()} + + # Prepare the commands to update the thermal clusters. commands = [] - for area_id, update_thermals in update_thermals_by_areas.items(): - old_thermals = old_thermals_by_areas.get(area_id, []) - old_thermals_by_id = {cluster.id: cluster for cluster in old_thermals} - update_thermals_by_id = {cluster.id: cluster for cluster in update_thermals} - for cluster_id, update_cluster in update_thermals_by_id.items(): + for area_id, update_thermals_by_ids in update_thermals_by_areas.items(): + old_thermals_by_ids = old_thermals_by_areas[area_id] + for thermal_id, update_cluster in update_thermals_by_ids.items(): # Update the thermal cluster properties. - old_cluster = old_thermals_by_id[cluster_id] + old_cluster = old_thermals_by_ids[thermal_id] new_cluster = old_cluster.copy(update=update_cluster.dict(by_alias=False, exclude_none=True)) - new_thermals_by_names.setdefault(area_id, []).append(new_cluster) + new_thermals_by_areas[area_id][thermal_id] = new_cluster # Convert the DTO to a configuration object and update the configuration file. 
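As in the other managers, the cluster updates below are first collected as `UpdateConfig` commands and only applied once at the end via `execute_or_add_commands`, which appends commands instead of editing files when the study is a variant. A stripped-down analogue of that collect-then-apply flow (plain tuples and dicts, not the real command classes):

import typing as t

Command = t.Tuple[str, t.Any]  # (target path, new value), a stand-in for UpdateConfig


def apply_commands(tree: t.Dict[str, t.Any], commands: t.Sequence[Command]) -> None:
    # Apply every collected change in one pass, mirroring execute_or_add_commands().
    for target, value in commands:
        node = tree
        *parents, leaf = target.split("/")
        for key in parents:
            node = node.setdefault(key, {})
        node[leaf] = value


tree: t.Dict[str, t.Any] = {}
commands = [
    ("input/thermal/clusters/fr/list/gas_1/enabled", False),
    ("input/thermal/clusters/fr/list/gas_1/unitcount", 3),
]
apply_commands(tree, commands)
assert tree["input"]["thermal"]["clusters"]["fr"]["list"]["gas_1"]["unitcount"] == 3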
properties = create_thermal_config(study.version, **new_cluster.dict(by_alias=False, exclude_none=True)) - path = _CLUSTER_PATH.format(area_id=area_id, cluster_id=cluster_id) + path = _CLUSTER_PATH.format(area_id=area_id, cluster_id=thermal_id) cmd = UpdateConfig( target=path, data=json.loads(properties.json(by_alias=True, exclude={"id"})), @@ -252,8 +253,14 @@ def update_thermals_props( ) commands.append(cmd) + file_study = self.storage_service.get_storage(study).get_raw(study) execute_or_add_commands(study, file_study, commands, self.storage_service) - return new_thermals_by_names + + return new_thermals_by_areas + + @staticmethod + def get_table_schema() -> JSON: + return ThermalClusterOutput.schema() def create_cluster(self, study: Study, area_id: str, cluster_data: ThermalClusterCreation) -> ThermalClusterOutput: """ diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py index b5037ca3b8..af37a9e1c5 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -22,6 +22,7 @@ NoConstraintError, WrongMatrixHeightError, ) +from antarest.core.model import JSON from antarest.core.utils.string import to_camel_case from antarest.study.business.all_optional_meta import camel_case_model from antarest.study.business.utils import execute_or_add_commands @@ -529,7 +530,7 @@ def get_binding_constraint(self, study: Study, bc_id: str) -> ConstraintOutput: file_study = storage_service.get_raw(study) config = file_study.tree.get(["input", "bindingconstraints", "bindingconstraints"]) - constraints_by_id: Dict[str, ConstraintOutput] = CaseInsensitiveDict() # type: ignore + constraints_by_id: t.Dict[str, ConstraintOutput] = CaseInsensitiveDict() # type: ignore for constraint in config.values(): constraint_config = self.constraint_model_adapter(constraint, int(study.version)) @@ -538,7 +539,7 @@ def get_binding_constraint(self, study: Study, bc_id: str) -> ConstraintOutput: if bc_id not in constraints_by_id: raise BindingConstraintNotFound(f"Binding constraint '{bc_id}' not found") - return t.cast(ConstraintOutput, constraints_by_id[bc_id]) + return constraints_by_id[bc_id] def get_binding_constraints( self, study: Study, filters: ConstraintFilters = ConstraintFilters() @@ -765,6 +766,29 @@ def update_binding_constraint( upd_constraint[field] = getattr(data, field) or getattr(existing_constraint, field) return self.constraint_model_adapter(upd_constraint, study_version) + def update_binding_constraints( + self, + study: Study, + bcs_by_ids: t.Mapping[str, ConstraintInput], + ) -> t.Mapping[str, ConstraintOutput]: + """ + Updates multiple binding constraints within a study. + + Args: + study: The study from which to update the constraints. + bcs_by_ids: A mapping of binding constraint IDs to their updated configurations. + + Returns: + A dictionary of the updated binding constraints, indexed by their IDs. + + Raises: + BindingConstraintNotFound: If any of the specified binding constraint IDs are not found. + """ + updated_constraints = {} + for bc_id, data in bcs_by_ids.items(): + updated_constraints[bc_id] = self.update_binding_constraint(study, bc_id, data) + return updated_constraints + def remove_binding_constraint(self, study: Study, binding_constraint_id: str) -> None: """ Removes a binding constraint from a study. 
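Constraint lookups in this manager go through a `CaseInsensitiveDict`, so a binding constraint ID may be given with a different case than the one stored in the INI file. A quick illustration of that behaviour, assuming the `CaseInsensitiveDict` shipped with the `requests` package:

from requests.structures import CaseInsensitiveDict

constraints_by_id = CaseInsensitiveDict()
constraints_by_id["North-South_Limit"] = {"operator": "less", "enabled": True}

# Lookups ignore case, so user-supplied IDs do not need to match the stored casing.
assert "north-south_limit" in constraints_by_id
assert constraints_by_id["NORTH-SOUTH_LIMIT"]["operator"] == "less"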
@@ -868,6 +892,10 @@ def remove_constraint_term( ) -> None: return self.update_constraint_term(study, binding_constraint_id, term_id) # type: ignore + @staticmethod + def get_table_schema() -> JSON: + return ConstraintOutput870.schema() + def _replace_matrices_according_to_frequency_and_version( data: ConstraintInput, version: int, args: t.Dict[str, t.Any] diff --git a/antarest/study/business/link_management.py b/antarest/study/business/link_management.py index 4d65e51aba..746a998ba6 100644 --- a/antarest/study/business/link_management.py +++ b/antarest/study/business/link_management.py @@ -3,6 +3,7 @@ from pydantic import BaseModel from antarest.core.exceptions import ConfigFileNotFound +from antarest.core.model import JSON from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands from antarest.study.model import RawStudy from antarest.study.storage.rawstudy.model.filesystem.config.links import LinkProperties @@ -27,7 +28,7 @@ class LinkInfoDTO(BaseModel): @camel_case_model -class GetLinkDTO(LinkProperties, metaclass=AllOptionalMetaclass, use_none=True): +class LinkOutput(LinkProperties, metaclass=AllOptionalMetaclass, use_none=True): """ DTO object use to get the link information. """ @@ -79,7 +80,7 @@ def delete_link(self, study: RawStudy, area1_id: str, area2_id: str) -> None: ) execute_or_add_commands(study, file_study, [command], self.storage_service) - def get_all_links_props(self, study: RawStudy) -> t.Mapping[t.Tuple[str, str], GetLinkDTO]: + def get_all_links_props(self, study: RawStudy) -> t.Mapping[t.Tuple[str, str], LinkOutput]: """ Retrieves all links properties from the study. @@ -107,15 +108,15 @@ def get_all_links_props(self, study: RawStudy) -> t.Mapping[t.Tuple[str, str], G for area2_id, properties_cfg in property_map.items(): area1_id, area2_id = sorted([area1_id, area2_id]) properties = LinkProperties(**properties_cfg) - links_by_ids[(area1_id, area2_id)] = GetLinkDTO(**properties.dict(by_alias=False)) + links_by_ids[(area1_id, area2_id)] = LinkOutput(**properties.dict(by_alias=False)) return links_by_ids def update_links_props( self, study: RawStudy, - update_links_by_ids: t.Mapping[t.Tuple[str, str], GetLinkDTO], - ) -> t.Mapping[t.Tuple[str, str], GetLinkDTO]: + update_links_by_ids: t.Mapping[t.Tuple[str, str], LinkOutput], + ) -> t.Mapping[t.Tuple[str, str], LinkOutput]: old_links_by_ids = self.get_all_links_props(study) new_links_by_ids = {} file_study = self.storage_service.get_storage(study).get_raw(study) @@ -138,3 +139,7 @@ def update_links_props( execute_or_add_commands(study, file_study, commands, self.storage_service) return new_links_by_ids + + @staticmethod + def get_table_schema() -> JSON: + return LinkOutput.schema() diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index 0e632e7084..3b8a514507 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -2,649 +2,50 @@ import typing as t import pandas as pd -from pydantic import Field -from antarest.study.business.area_management import AreaManager -from antarest.study.business.areas.renewable_management import RenewableManager, TimeSeriesInterpretation -from antarest.study.business.areas.st_storage_management import STStorageManager -from antarest.study.business.areas.thermal_management import ThermalClusterOutput, ThermalManager -from antarest.study.business.binding_constraint_management import BindingConstraintManager +from 
antarest.core.model import JSON +from antarest.study.business.area_management import AreaManager, AreaOutput +from antarest.study.business.areas.renewable_management import ( + RenewableClusterInput, + RenewableManager, +) +from antarest.study.business.areas.st_storage_management import STStorageInput, STStorageManager +from antarest.study.business.areas.thermal_management import ThermalClusterInput, ThermalManager +from antarest.study.business.binding_constraint_management import BindingConstraintManager, ConstraintInput from antarest.study.business.enum_ignore_case import EnumIgnoreCase -from antarest.study.business.link_management import GetLinkDTO, LinkManager -from antarest.study.business.utils import AllOptionalMetaclass, FormFieldsBaseModel -from antarest.study.common.default_values import FilteringOptions, LinkProperties, NodalOptimization +from antarest.study.business.link_management import LinkManager, LinkOutput from antarest.study.model import RawStudy -from antarest.study.storage.rawstudy.model.filesystem.config.area import AdequacyPatchMode -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( - BindingConstraintFrequency, - BindingConstraintOperator, -) -from antarest.study.storage.rawstudy.model.filesystem.config.links import AssetType, TransmissionCapacity -from antarest.study.storage.rawstudy.model.filesystem.config.thermal import LawOption, LocalTSGenerationBehavior -from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -AREA_PATH = "input/areas/{area}" -THERMAL_PATH = "input/thermal/areas" -LINK_GLOB_PATH = "input/links/{area1}/properties" -LINK_PATH = f"{LINK_GLOB_PATH}/{{area2}}" -THERMAL_CLUSTER_GLOB_PATH = "input/thermal/clusters/{area}/list" -THERMAL_CLUSTER_PATH = f"{THERMAL_CLUSTER_GLOB_PATH}/{{cluster}}" -RENEWABLE_CLUSTER_GLOB_PATH = "input/renewables/clusters/{area}/list" -RENEWABLE_CLUSTER_PATH = f"{RENEWABLE_CLUSTER_GLOB_PATH}/{{cluster}}" -BINDING_CONSTRAINT_PATH = "input/bindingconstraints/bindingconstraints" +_TableIndex = str # row name +_TableColumn = str # column name +_CellValue = t.Any # cell value (str, int, float, bool, enum, etc.) +TableDataDTO = t.Mapping[_TableIndex, t.Mapping[_TableColumn, _CellValue]] class TableTemplateType(EnumIgnoreCase): + """ + Table template types. + + This enum is used to define the different types of tables that can be created + by the user to leverage the editing capabilities of multiple objects at once. + + Attributes: + AREA: Area table. + LINK: Link table. + THERMAL_CLUSTER: Thermal clusters table. + RENEWABLE_CLUSTER: Renewable clusters table. + ST_STORAGE: Short-Term Storages table. + BINDING_CONSTRAINT: Binding constraints table. 
+ """ + AREA = "areas" LINK = "links" THERMAL_CLUSTER = "thermals" RENEWABLE_CLUSTER = "renewables" - ST_STORAGE = "storages" - BINDING_CONSTRAINT = "constraints" - - -class AreaColumns(FormFieldsBaseModel, metaclass=AllOptionalMetaclass): - # Optimization - Nodal optimization - non_dispatchable_power: bool = Field( - default=NodalOptimization.NON_DISPATCHABLE_POWER, - path=f"{AREA_PATH}/optimization/nodal optimization/non-dispatchable-power", - ) - dispatchable_hydro_power: bool = Field( - default=NodalOptimization.DISPATCHABLE_HYDRO_POWER, - path=f"{AREA_PATH}/optimization/nodal optimization/dispatchable-hydro-power", - ) - other_dispatchable_power: bool = Field( - default=NodalOptimization.OTHER_DISPATCHABLE_POWER, - path=f"{AREA_PATH}/optimization/nodal optimization/other-dispatchable-power", - ) - average_unsupplied_energy_cost: float = Field( - default=NodalOptimization.SPREAD_UNSUPPLIED_ENERGY_COST, - path=f"{THERMAL_PATH}/unserverdenergycost/{{area}}", - ) - spread_unsupplied_energy_cost: float = Field( - default=NodalOptimization.SPREAD_UNSUPPLIED_ENERGY_COST, - path=f"{AREA_PATH}/optimization/nodal optimization/spread-unsupplied-energy-cost", - ) - average_spilled_energy_cost: float = Field( - default=NodalOptimization.SPREAD_SPILLED_ENERGY_COST, - path=f"{THERMAL_PATH}/spilledenergycost/{{area}}", - ) - spread_spilled_energy_cost: float = Field( - default=NodalOptimization.SPREAD_SPILLED_ENERGY_COST, - path=f"{AREA_PATH}/optimization/nodal optimization/spread-spilled-energy-cost", - ) - # Optimization - Filtering - filter_synthesis: str = Field( - default=FilteringOptions.FILTER_SYNTHESIS, - path=f"{AREA_PATH}/optimization/filtering/filter-synthesis", - ) - filter_year_by_year: str = Field( - default=FilteringOptions.FILTER_YEAR_BY_YEAR, - path=f"{AREA_PATH}/optimization/filtering/filter-year-by-year", - ) - # Adequacy patch - adequacy_patch_mode: AdequacyPatchMode = Field( - default=AdequacyPatchMode.OUTSIDE.value, - path=f"{AREA_PATH}/adequacy_patch/adequacy-patch/adequacy-patch-mode", - ) - - -class LinkColumns(FormFieldsBaseModel, metaclass=AllOptionalMetaclass): - hurdles_cost: bool = Field(default=LinkProperties.HURDLES_COST, path=f"{LINK_PATH}/hurdles-cost") - loop_flow: bool = Field(default=LinkProperties.LOOP_FLOW, path=f"{LINK_PATH}/loop-flow") - use_phase_shifter: bool = Field( - default=LinkProperties.USE_PHASE_SHIFTER, - path=f"{LINK_PATH}/use-phase-shifter", - ) - transmission_capacities: TransmissionCapacity = Field( - default=LinkProperties.TRANSMISSION_CAPACITIES, - path=f"{LINK_PATH}/transmission-capacities", - ) - asset_type: AssetType = Field(default=LinkProperties.ASSET_TYPE, path=f"{LINK_PATH}/asset-type") - link_style: str = Field(default=LinkProperties.LINK_STYLE, path=f"{LINK_PATH}/link-style") - link_width: int = Field(default=LinkProperties.LINK_WIDTH, path=f"{LINK_PATH}/link-width") - display_comments: bool = Field( - default=LinkProperties.DISPLAY_COMMENTS, - path=f"{LINK_PATH}/display-comments", - ) - filter_synthesis: str = Field( - default=FilteringOptions.FILTER_SYNTHESIS, - path=f"{LINK_PATH}/filter-synthesis", - ) - filter_year_by_year: str = Field( - default=FilteringOptions.FILTER_YEAR_BY_YEAR, - path=f"{LINK_PATH}/filter-year-by-year", - ) - - -class ThermalClusterColumns(FormFieldsBaseModel, metaclass=AllOptionalMetaclass): - group: str = Field( - default="", - path=f"{THERMAL_CLUSTER_PATH}/group", - ) - enabled: bool = Field( - default=True, - path=f"{THERMAL_CLUSTER_PATH}/enabled", - ) - must_run: bool = Field( - default=False, - 
path=f"{THERMAL_CLUSTER_PATH}/must-run", - ) - unit_count: int = Field( - default=0, - path=f"{THERMAL_CLUSTER_PATH}/unitcount", - ) - nominal_capacity: int = Field( - default=0, - path=f"{THERMAL_CLUSTER_PATH}/nominalcapacity", - ) - min_stable_power: int = Field( - default=0, - path=f"{THERMAL_CLUSTER_PATH}/min-stable-power", - ) - spinning: int = Field( - default=0, - path=f"{THERMAL_CLUSTER_PATH}/spinning", - ) - min_up_time: int = Field( - default=1, - path=f"{THERMAL_CLUSTER_PATH}/min-up-time", - ) - min_down_time: int = Field( - default=1, - path=f"{THERMAL_CLUSTER_PATH}/min-down-time", - ) - marginal_cost: int = Field( - default=0, - path=f"{THERMAL_CLUSTER_PATH}/marginal-cost", - ) - fixed_cost: int = Field( - default=0, - path=f"{THERMAL_CLUSTER_PATH}/fixed-cost", - ) - startup_cost: int = Field( - default=0, - path=f"{THERMAL_CLUSTER_PATH}/startup-cost", - ) - market_bid_cost: int = Field( - default=0, - path=f"{THERMAL_CLUSTER_PATH}/market-bid-cost", - ) - spread_cost: int = Field( - default=0, - path=f"{THERMAL_CLUSTER_PATH}/spread-cost", - ) - ts_gen: LocalTSGenerationBehavior = Field( - default=LocalTSGenerationBehavior.USE_GLOBAL.value, - path=f"{THERMAL_CLUSTER_PATH}/gen-ts", - ) - volatility_forced: int = Field( - default=0, - path=f"{THERMAL_CLUSTER_PATH}/volatility.forced", - ) - volatility_planned: int = Field( - default=0, - path=f"{THERMAL_CLUSTER_PATH}/volatility.planned", - ) - law_forced: LawOption = Field( - default=LawOption.UNIFORM.value, - path=f"{THERMAL_CLUSTER_PATH}/law.forced", - ) - law_planned: LawOption = Field( - default=LawOption.UNIFORM.value, - path=f"{THERMAL_CLUSTER_PATH}/law.planned", - ) - # Pollutants - co2: float = Field( - default=0.0, - path=f"{THERMAL_CLUSTER_PATH}/co2", - ) - so2: float = Field( - default=0.0, - path=f"{THERMAL_CLUSTER_PATH}/so2", - ) - nh3: float = Field( - default=0.0, - path=f"{THERMAL_CLUSTER_PATH}/nh3", - ) - nox: float = Field( - default=0.0, - path=f"{THERMAL_CLUSTER_PATH}/nox", - ) - nmvoc: float = Field( - default=0.0, - path=f"{THERMAL_CLUSTER_PATH}/nmvoc", - ) - pm25: float = Field( - default=0.0, - path=f"{THERMAL_CLUSTER_PATH}/pm2_5", - ) - pm5: float = Field( - default=0.0, - path=f"{THERMAL_CLUSTER_PATH}/pm5", - ) - pm10: float = Field( - default=0.0, - path=f"{THERMAL_CLUSTER_PATH}/pm10", - ) - op1: float = Field( - default=0.0, - path=f"{THERMAL_CLUSTER_PATH}/op1", - ) - op2: float = Field( - default=0.0, - path=f"{THERMAL_CLUSTER_PATH}/op2", - ) - op3: float = Field( - default=0.0, - path=f"{THERMAL_CLUSTER_PATH}/op3", - ) - op4: float = Field( - default=0.0, - path=f"{THERMAL_CLUSTER_PATH}/op4", - ) - op5: float = Field( - default=0.0, - path=f"{THERMAL_CLUSTER_PATH}/op5", - ) - - -class RenewableClusterColumns(FormFieldsBaseModel, metaclass=AllOptionalMetaclass): - group: str = Field(default="", path=f"{RENEWABLE_CLUSTER_PATH}/group") - ts_interpretation: TimeSeriesInterpretation = Field( - default=TimeSeriesInterpretation.POWER_GENERATION.value, - path=f"{RENEWABLE_CLUSTER_PATH}/ts-interpretation", - ) - enabled: bool = Field(default=True, path=f"{RENEWABLE_CLUSTER_PATH}/enabled") - unit_count: int = Field(default=0, path=f"{RENEWABLE_CLUSTER_PATH}/unitcount") - nominal_capacity: int = Field(default=0, path=f"{RENEWABLE_CLUSTER_PATH}/nominalcapacity") - - -class BindingConstraintColumns(FormFieldsBaseModel, metaclass=AllOptionalMetaclass): - type: BindingConstraintFrequency = Field( - default=BindingConstraintFrequency.HOURLY.value, - path=f"{BINDING_CONSTRAINT_PATH}/type", - ) - operator: 
BindingConstraintOperator = Field( - default=BindingConstraintOperator.LESS.value, - path=f"{BINDING_CONSTRAINT_PATH}/operator", - ) - enabled: bool = Field( - default=True, - path=f"{BINDING_CONSTRAINT_PATH}/enabled", - ) - group: t.Optional[str] = Field( - default="default", - path=f"{BINDING_CONSTRAINT_PATH}/group", - ) - - -class ColumnInfo(t.TypedDict): - path: str - default_value: t.Any - - -class PathVars(t.TypedDict, total=False): - # Area - id: str - # Link - area1: str - area2: str - # Thermal cluster, Renewable cluster - area: str - cluster: str - - -FIELDS_INFO_BY_TYPE: t.Dict[TableTemplateType, t.Dict[str, ColumnInfo]] = { - TableTemplateType.AREA: { - "non_dispatchable_power": { - "path": f"{AREA_PATH}/optimization/nodal optimization/non-dispatchable-power", - "default_value": NodalOptimization.NON_DISPATCHABLE_POWER, - }, - "dispatchable_hydro_power": { - "path": f"{AREA_PATH}/optimization/nodal optimization/dispatchable-hydro-power", - "default_value": NodalOptimization.DISPATCHABLE_HYDRO_POWER, - }, - "other_dispatchable_power": { - "path": f"{AREA_PATH}/optimization/nodal optimization/other-dispatchable-power", - "default_value": NodalOptimization.OTHER_DISPATCHABLE_POWER, - }, - "average_unsupplied_energy_cost": { - "path": f"{THERMAL_PATH}/unserverdenergycost/{{area}}", - "default_value": NodalOptimization.SPREAD_UNSUPPLIED_ENERGY_COST, - }, - "spread_unsupplied_energy_cost": { - "path": f"{AREA_PATH}/optimization/nodal optimization/spread-unsupplied-energy-cost", - "default_value": NodalOptimization.SPREAD_UNSUPPLIED_ENERGY_COST, - }, - "average_spilled_energy_cost": { - "path": f"{THERMAL_PATH}/spilledenergycost/{{area}}", - "default_value": NodalOptimization.SPREAD_SPILLED_ENERGY_COST, - }, - "spread_spilled_energy_cost": { - "path": f"{AREA_PATH}/optimization/nodal optimization/spread-spilled-energy-cost", - "default_value": NodalOptimization.SPREAD_SPILLED_ENERGY_COST, - }, - "filter_synthesis": { - "path": f"{AREA_PATH}/optimization/filtering/filter-synthesis", - "default_value": FilteringOptions.FILTER_SYNTHESIS, - }, - "filter_year_by_year": { - "path": f"{AREA_PATH}/optimization/filtering/filter-year-by-year", - "default_value": FilteringOptions.FILTER_YEAR_BY_YEAR, - }, - "adequacy_patch_mode": { - "path": f"{AREA_PATH}/adequacy_patch/adequacy-patch/adequacy-patch-mode", - "default_value": AdequacyPatchMode.OUTSIDE.value, - }, - }, - TableTemplateType.LINK: { - "hurdles_cost": { - "path": f"{LINK_PATH}/hurdles-cost", - "default_value": LinkProperties.HURDLES_COST, - }, - "loop_flow": { - "path": f"{LINK_PATH}/loop-flow", - "default_value": LinkProperties.LOOP_FLOW, - }, - "use_phase_shifter": { - "path": f"{LINK_PATH}/use-phase-shifter", - "default_value": LinkProperties.USE_PHASE_SHIFTER, - }, - "transmission_capacities": { - "path": f"{LINK_PATH}/transmission-capacities", - "default_value": LinkProperties.TRANSMISSION_CAPACITIES, - }, - "asset_type": { - "path": f"{LINK_PATH}/asset-type", - "default_value": LinkProperties.ASSET_TYPE, - }, - "link_style": { - "path": f"{LINK_PATH}/link-style", - "default_value": LinkProperties.LINK_STYLE, - }, - "link_width": { - "path": f"{LINK_PATH}/link-width", - "default_value": LinkProperties.LINK_WIDTH, - }, - "display_comments": { - "path": f"{LINK_PATH}/display-comments", - "default_value": LinkProperties.DISPLAY_COMMENTS, - }, - "filter_synthesis": { - "path": f"{LINK_PATH}/filter-synthesis", - "default_value": FilteringOptions.FILTER_SYNTHESIS, - }, - "filter_year_by_year": { - "path": 
f"{LINK_PATH}/filter-year-by-year", - "default_value": FilteringOptions.FILTER_YEAR_BY_YEAR, - }, - }, - TableTemplateType.THERMAL_CLUSTER: { - "group": { - "path": f"{THERMAL_CLUSTER_PATH}/group", - "default_value": "", - }, - "enabled": { - "path": f"{THERMAL_CLUSTER_PATH}/enabled", - "default_value": True, - }, - "must_run": { - "path": f"{THERMAL_CLUSTER_PATH}/must-run", - "default_value": False, - }, - "unit_count": { - "path": f"{THERMAL_CLUSTER_PATH}/unitcount", - "default_value": 0, - }, - "nominal_capacity": { - "path": f"{THERMAL_CLUSTER_PATH}/nominalcapacity", - "default_value": 0, - }, - "min_stable_power": { - "path": f"{THERMAL_CLUSTER_PATH}/min-stable-power", - "default_value": 0, - }, - "spinning": { - "path": f"{THERMAL_CLUSTER_PATH}/spinning", - "default_value": 0, - }, - "min_up_time": { - "path": f"{THERMAL_CLUSTER_PATH}/min-up-time", - "default_value": 1, - }, - "min_down_time": { - "path": f"{THERMAL_CLUSTER_PATH}/min-down-time", - "default_value": 1, - }, - "co2": { - "path": f"{THERMAL_CLUSTER_PATH}/co2", - "default_value": 0, - }, - "marginal_cost": { - "path": f"{THERMAL_CLUSTER_PATH}/marginal-cost", - "default_value": 0, - }, - "fixed_cost": { - "path": f"{THERMAL_CLUSTER_PATH}/fixed-cost", - "default_value": 0, - }, - "startup_cost": { - "path": f"{THERMAL_CLUSTER_PATH}/startup-cost", - "default_value": 0, - }, - "market_bid_cost": { - "path": f"{THERMAL_CLUSTER_PATH}/market-bid-cost", - "default_value": 0, - }, - "spread_cost": { - "path": f"{THERMAL_CLUSTER_PATH}/spread-cost", - "default_value": 0, - }, - "ts_gen": { - "path": f"{THERMAL_CLUSTER_PATH}/gen-ts", - "default_value": LocalTSGenerationBehavior.USE_GLOBAL.value, - }, - "volatility_forced": { - "path": f"{THERMAL_CLUSTER_PATH}/volatility.forced", - "default_value": 0, - }, - "volatility_planned": { - "path": f"{THERMAL_CLUSTER_PATH}/volatility.planned", - "default_value": 0, - }, - "law_forced": { - "path": f"{THERMAL_CLUSTER_PATH}/law.forced", - "default_value": LawOption.UNIFORM.value, - }, - "law_planned": { - "path": f"{THERMAL_CLUSTER_PATH}/law.planned", - "default_value": LawOption.UNIFORM.value, - }, - }, - TableTemplateType.RENEWABLE_CLUSTER: { - "group": { - "path": f"{RENEWABLE_CLUSTER_PATH}/group", - "default_value": "", - }, - "ts_interpretation": { - "path": f"{RENEWABLE_CLUSTER_PATH}/ts-interpretation", - "default_value": TimeSeriesInterpretation.POWER_GENERATION.value, - }, - "enabled": { - "path": f"{RENEWABLE_CLUSTER_PATH}/enabled", - "default_value": True, - }, - "unit_count": { - "path": f"{RENEWABLE_CLUSTER_PATH}/unitcount", - "default_value": 0, - }, - "nominal_capacity": { - "path": f"{RENEWABLE_CLUSTER_PATH}/nominalcapacity", - "default_value": 0, - }, - }, - TableTemplateType.BINDING_CONSTRAINT: { - "type": { - "path": f"{BINDING_CONSTRAINT_PATH}/type", - "default_value": BindingConstraintFrequency.HOURLY.value, - }, - "operator": { - "path": f"{BINDING_CONSTRAINT_PATH}/operator", - "default_value": BindingConstraintOperator.LESS.value, - }, - "enabled": { - "path": f"{BINDING_CONSTRAINT_PATH}/enabled", - "default_value": True, - }, - "group": { - "path": f"{BINDING_CONSTRAINT_PATH}/group", - "default_value": None, - }, - }, -} - -COLUMNS_MODELS_BY_TYPE = { - TableTemplateType.AREA: AreaColumns, - TableTemplateType.LINK: LinkColumns, - TableTemplateType.THERMAL_CLUSTER: ThermalClusterColumns, - TableTemplateType.RENEWABLE_CLUSTER: RenewableClusterColumns, - TableTemplateType.BINDING_CONSTRAINT: BindingConstraintColumns, -} - -ColumnsModelTypes = t.Union[ - AreaColumns, - 
LinkColumns, - ThermalClusterColumns, - RenewableClusterColumns, - BindingConstraintColumns, -] - - -def _get_glob_object(file_study: FileStudy, table_type: TableTemplateType) -> t.Dict[str, t.Any]: - """ - Retrieves the fields of an object according to its type (area, link, thermal cluster...). - - Args: - file_study: A file study from which the configuration can be read. - table_type: Type of the object. - - Returns: - Dictionary containing the fields used in Table mode. - - Raises: - ChildNotFoundError: if one of the Area IDs is not found in the configuration. - """ - # sourcery skip: extract-method - if table_type == TableTemplateType.AREA: - info_map: t.Dict[str, t.Any] = file_study.tree.get(url=AREA_PATH.format(area="*").split("/"), depth=3) - area_ids = list(file_study.config.areas) - # If there is only one ID in the `area_ids`, the result returned from - # the `file_study.tree.get` call will be a single object. - # On the other hand, if there are multiple values in `area_ids`, - # the result will be a dictionary where the keys are the IDs, - # and the values are the corresponding objects. - if len(area_ids) == 1: - info_map = {area_ids[0]: info_map} - # Add thermal fields in info_map - thermal_fields = file_study.tree.get(THERMAL_PATH.split("/")) - for field, field_props in thermal_fields.items(): - for area_id, value in field_props.items(): - if area_id in info_map: - info_map[area_id][field] = value - return info_map - - if table_type == TableTemplateType.LINK: - return file_study.tree.get(LINK_GLOB_PATH.format(area1="*").split("/")) - if table_type == TableTemplateType.THERMAL_CLUSTER: - return file_study.tree.get(THERMAL_CLUSTER_GLOB_PATH.format(area="*").split("/")) - if table_type == TableTemplateType.RENEWABLE_CLUSTER: - return file_study.tree.get(RENEWABLE_CLUSTER_GLOB_PATH.format(area="*").split("/")) - if table_type == TableTemplateType.BINDING_CONSTRAINT: - return file_study.tree.get(BINDING_CONSTRAINT_PATH.split("/")) - - return {} - - -def _get_value(path: t.List[str], data: t.Dict[str, t.Any], default_value: t.Any) -> t.Any: - if len(path): - return _get_value(path[1:], data.get(path[0], {}), default_value) - return data if data != {} else default_value - - -def _get_relative_path( - table_type: TableTemplateType, - path: str, -) -> t.List[str]: - base_path = "" - path_arr = path.split("/") - - if table_type == TableTemplateType.AREA: - if path.startswith(THERMAL_PATH): - base_path = THERMAL_PATH - # Remove {area} - path_arr = path_arr[:-1] - else: - base_path = AREA_PATH - elif table_type == TableTemplateType.LINK: - base_path = LINK_PATH - elif table_type == TableTemplateType.THERMAL_CLUSTER: - base_path = THERMAL_CLUSTER_PATH - elif table_type == TableTemplateType.RENEWABLE_CLUSTER: - base_path = RENEWABLE_CLUSTER_PATH - elif table_type == TableTemplateType.BINDING_CONSTRAINT: - base_path = BINDING_CONSTRAINT_PATH - - return path_arr[len(base_path.split("/")) :] - - -def _get_column_path( - table_type: TableTemplateType, - column: str, - path_vars: PathVars, -) -> str: - columns_model = COLUMNS_MODELS_BY_TYPE[table_type] - path = t.cast(str, columns_model.__fields__[column].field_info.extra["path"]) - - if table_type == TableTemplateType.AREA: - return path.format(area=path_vars["id"]) - if table_type == TableTemplateType.LINK: - return path.format(area1=path_vars["area1"], area2=path_vars["area2"]) - if table_type in [ - TableTemplateType.THERMAL_CLUSTER, - TableTemplateType.RENEWABLE_CLUSTER, - ]: - return path.format(area=path_vars["area"], 
cluster=path_vars["cluster"]) - - return path - - -def _get_path_vars_from_key( - table_type: TableTemplateType, - key: str, -) -> PathVars: - if table_type in [ - TableTemplateType.AREA, - TableTemplateType.BINDING_CONSTRAINT, - ]: - return PathVars(id=key) - if table_type == TableTemplateType.LINK: - area1, area2 = [v.strip() for v in key.split("/")] - return PathVars(area1=area1, area2=area2) - if table_type in [ - TableTemplateType.THERMAL_CLUSTER, - TableTemplateType.RENEWABLE_CLUSTER, - ]: - area, cluster = [v.strip() for v in key.split("/")] - return PathVars(area=area, cluster=cluster) - - return PathVars() - - -_TableIndex = str # row name -_TableColumn = str # column name -_CellValue = t.Any # cell value (str, int, float, bool, enum, etc.) -TableDataDTO = t.Mapping[_TableIndex, t.Mapping[_TableColumn, _CellValue]] + # Avoid "storages" because we may have "lt-storages" (long-term storages) in the future + ST_STORAGE = "st-storages" + # Avoid "constraints" because we may have other kinds of constraints in the future + BINDING_CONSTRAINT = "binding-constraints" class TableModeManager: @@ -679,25 +80,25 @@ def get_table_data( f"{area1_id} / {area2_id}": link.dict(by_alias=True) for (area1_id, area2_id), link in links_map.items() } elif table_type == TableTemplateType.THERMAL_CLUSTER: - thermals_map = self._thermal_manager.get_all_thermals_props(study) + thermals_by_areas = self._thermal_manager.get_all_thermals_props(study) data = { - f"{area_id} / {cluster.id}": cluster.dict(by_alias=True) - for area_id, clusters in thermals_map.items() - for cluster in clusters + f"{area_id} / {storage_id}": storage.dict(by_alias=True) + for area_id, thermals_by_ids in thermals_by_areas.items() + for storage_id, storage in thermals_by_ids.items() } elif table_type == TableTemplateType.RENEWABLE_CLUSTER: - renewables_map = self._renewable_manager.get_all_renewables_props(study) + renewables_by_areas = self._renewable_manager.get_all_renewables_props(study) data = { - f"{area_id} / {cluster.id}": cluster.dict(by_alias=True) - for area_id, clusters in renewables_map.items() - for cluster in clusters + f"{area_id} / {storage_id}": storage.dict(by_alias=True) + for area_id, renewables_by_ids in renewables_by_areas.items() + for storage_id, storage in renewables_by_ids.items() } elif table_type == TableTemplateType.ST_STORAGE: - storages_map = self._st_storage_manager.get_all_storages_props(study) + storages_by_areas = self._st_storage_manager.get_all_storages_props(study) data = { - f"{area_id} / {storage.id}": storage.dict(by_alias=True) - for area_id, storages in storages_map.items() - for storage in storages + f"{area_id} / {storage_id}": storage.dict(by_alias=True) + for area_id, storages_by_ids in storages_by_areas.items() + for storage_id, storage in storages_by_ids.items() } elif table_type == TableTemplateType.BINDING_CONSTRAINT: bc_seq = self._binding_constraint_manager.get_binding_constraints(study) @@ -710,7 +111,8 @@ def get_table_data( # Create a new dataframe with the listed columns. # If a column does not exist in the DataFrame, it is created with empty values. 
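The reindexing performed just below can be reproduced in isolation; a small standalone example of the same pandas pattern, with hypothetical row data:

import pandas as pd

rows = {
    "fr / gas_1": {"enabled": True, "unit_count": 2},
    "de / wind_on": {"enabled": False},  # "unit_count" missing for this row
}
columns = ["enabled", "unit_count", "group"]  # "group" exists in no row

df = pd.DataFrame.from_dict(rows, orient="index")
# Reindex on the requested columns: missing cells become NaN...
df = pd.DataFrame(df, columns=columns)
# ...then NaN is normalised to None so the JSON payload contains nulls.
df = df.where(pd.notna(df), other=None)

obj = df.to_dict(orient="index")
assert obj["de / wind_on"]["unit_count"] is None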
df = pd.DataFrame(df, columns=columns) # type: ignore - df = df.where(pd.notna(df), other=None) + # noinspection PyTypeChecker + df = df.where(pd.notna(df), other=None) # obj = df.to_dict(orient="index") @@ -753,16 +155,20 @@ def get_column_value(col: str, data: t.Dict[str, t.Any]) -> t.Any: return obj - def set_table_data( + def update_table_data( self, study: RawStudy, table_type: TableTemplateType, data: TableDataDTO, ) -> TableDataDTO: if table_type == TableTemplateType.AREA: - return {} + # Use AreaOutput to update properties of areas + area_props_by_ids = {key: AreaOutput(**values) for key, values in data.items()} + areas_map = self._area_manager.update_areas_props(study, area_props_by_ids) + data = {area_id: area.dict(by_alias=True) for area_id, area in areas_map.items()} + return data elif table_type == TableTemplateType.LINK: - links_map = {tuple(key.split(" / ")): GetLinkDTO(**values) for key, values in data.items()} + links_map = {tuple(key.split(" / ")): LinkOutput(**values) for key, values in data.items()} updated_map = self._link_manager.update_links_props(study, links_map) # type: ignore data = { f"{area1_id} / {area2_id}": link.dict(by_alias=True) @@ -770,22 +176,72 @@ def set_table_data( } return data elif table_type == TableTemplateType.THERMAL_CLUSTER: - thermals_by_areas = collections.defaultdict(list) + thermals_by_areas: t.MutableMapping[str, t.MutableMapping[str, ThermalClusterInput]] + thermals_by_areas = collections.defaultdict(dict) for key, values in data.items(): area_id, cluster_id = key.split(" / ") - thermals_by_areas[area_id].append(ThermalClusterOutput(**values, id=cluster_id)) + thermals_by_areas[area_id][cluster_id] = ThermalClusterInput(**values) thermals_map = self._thermal_manager.update_thermals_props(study, thermals_by_areas) data = { - f"{area_id} / {cluster.id}": cluster.dict(by_alias=True) - for area_id, clusters in thermals_map.items() - for cluster in clusters + f"{area_id} / {cluster_id}": cluster.dict(by_alias=True) + for area_id, thermals_by_ids in thermals_map.items() + for cluster_id, cluster in thermals_by_ids.items() } return data elif table_type == TableTemplateType.RENEWABLE_CLUSTER: - return {} + renewables_by_areas: t.MutableMapping[str, t.MutableMapping[str, RenewableClusterInput]] + renewables_by_areas = collections.defaultdict(dict) + for key, values in data.items(): + area_id, cluster_id = key.split(" / ") + renewables_by_areas[area_id][cluster_id] = RenewableClusterInput(**values) + renewables_map = self._renewable_manager.update_renewables_props(study, renewables_by_areas) + data = { + f"{area_id} / {cluster_id}": cluster.dict(by_alias=True) + for area_id, renewables_by_ids in renewables_map.items() + for cluster_id, cluster in renewables_by_ids.items() + } + return data + elif table_type == TableTemplateType.ST_STORAGE: + storages_by_areas: t.MutableMapping[str, t.MutableMapping[str, STStorageInput]] + storages_by_areas = collections.defaultdict(dict) + for key, values in data.items(): + area_id, cluster_id = key.split(" / ") + storages_by_areas[area_id][cluster_id] = STStorageInput(**values) + storages_map = self._st_storage_manager.update_storages_props(study, storages_by_areas) + data = { + f"{area_id} / {cluster_id}": cluster.dict(by_alias=True) + for area_id, storages_by_ids in storages_map.items() + for cluster_id, cluster in storages_by_ids.items() + } + return data + elif table_type == TableTemplateType.BINDING_CONSTRAINT: + bcs_by_ids = {key: ConstraintInput(**values) for key, values in data.items()} + bcs_map = 
self._binding_constraint_manager.update_binding_constraints(study, bcs_by_ids) + return {bc_id: bc.dict(by_alias=True, exclude={"id", "name", "terms"}) for bc_id, bc in bcs_map.items()} + else: # pragma: no cover + raise NotImplementedError(f"Table type {table_type} not implemented") + + def get_table_schema(self, table_type: TableTemplateType) -> JSON: + """ + Get the properties of the table columns which type is provided as a parameter. + + Args: + table_type: The type of the table. + + Returns: + JSON Schema which allows to know the name, title and type of each column. + """ + if table_type == TableTemplateType.AREA: + return self._area_manager.get_table_schema() + elif table_type == TableTemplateType.LINK: + return self._link_manager.get_table_schema() + elif table_type == TableTemplateType.THERMAL_CLUSTER: + return self._thermal_manager.get_table_schema() + elif table_type == TableTemplateType.RENEWABLE_CLUSTER: + return self._renewable_manager.get_table_schema() elif table_type == TableTemplateType.ST_STORAGE: - return {} + return self._st_storage_manager.get_table_schema() elif table_type == TableTemplateType.BINDING_CONSTRAINT: - return {} + return self._binding_constraint_manager.get_table_schema() else: # pragma: no cover raise NotImplementedError(f"Table type {table_type} not implemented") diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/area.py b/antarest/study/storage/rawstudy/model/filesystem/config/area.py index 52c70540b0..458f1185aa 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/area.py @@ -184,7 +184,7 @@ def to_config(self) -> t.Mapping[str, t.Any]: >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI >>> from pprint import pprint - >>> ui = AreaUI(x=1148, y=144, color_rgb='#0080FF') + >>> ui = AreaUI(x=1148, y=144, color_rgb="#0080FF") >>> pprint(ui.to_config(), width=80) {'color_b': 255, 'color_g': 128, 'color_r': 0, 'x': 1148, 'y': 144} """ @@ -260,8 +260,8 @@ class UIProperties(IniProperties): ) @root_validator(pre=True) - def _validate_layers(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]: - # Defined the default style if missing + def _set_default_style(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]: + """Defined the default style if missing.""" style = values.get("style") if style is None: values["style"] = AreaUI() @@ -269,13 +269,19 @@ def _validate_layers(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str values["style"] = AreaUI(**style) else: values["style"] = AreaUI(**style.dict()) + return values - # Define the default layers if missing - layers = values.get("layers") - if layers is None: + @root_validator(pre=True) + def _set_default_layers(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]: + """Define the default layers if missing.""" + _layers = values.get("layers") + if _layers is None: values["layers"] = {0} + return values - # Define the default layer styles if missing + @root_validator(pre=True) + def _set_default_layer_styles(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]: + """Define the default layer styles if missing.""" layer_styles = values.get("layer_styles") if layer_styles is None: values["layer_styles"] = {0: AreaUI()} @@ -289,7 +295,10 @@ def _validate_layers(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str values["layer_styles"][key] = AreaUI(**style.dict()) else: raise TypeError(f"Invalid type for 
layer_styles: {type(layer_styles)}") + return values + @root_validator(pre=True) + def _validate_layers(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]: # Parse the `[ui]` section (if any) ui_section = values.pop("ui", {}) if ui_section: @@ -341,9 +350,10 @@ def to_config(self) -> t.Mapping[str, t.Mapping[str, t.Any]]: ... style=AreaUI(x=1148, y=144, color_rgb=(0, 128, 255)), ... layers={0, 7}, ... layer_styles={ - ... 6: AreaUI(x=1148, y=144, color_rgb='#C0A863'), + ... 6: AreaUI(x=1148, y=144, color_rgb="#C0A863"), ... 7: AreaUI(x=18, y=-22, color_rgb=(0, 128, 255)), - ... }) + ... }, + ... ) >>> pprint(ui.to_config(), width=80) {'layerColor': {'0': '230, 108, 44', '6': '192, 168, 99', '7': '0, 128, 255'}, 'layerX': {'0': 0, '6': 1148, '7': 18}, diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/cluster.py b/antarest/study/storage/rawstudy/model/filesystem/config/cluster.py index 4563a0d217..1c84019294 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/cluster.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/cluster.py @@ -3,13 +3,12 @@ In the near future, this set of classes may be used for solar, wind and hydro clusters. """ + import functools import typing as t from pydantic import BaseModel, Extra, Field -__all__ = ("ItemProperties", "ClusterProperties") - @functools.total_ordering class ItemProperties( @@ -69,10 +68,16 @@ class ClusterProperties(ItemProperties): # Activity status: # - True: the plant may generate. # - False: not yet commissioned, moth-balled, etc. - enabled: bool = Field(default=True, description="Activity status") + enabled: bool = Field(default=True, description="Activity status", title="Enabled") # noinspection SpellCheckingInspection - unit_count: int = Field(default=1, ge=1, description="Unit count", alias="unitcount") + unit_count: int = Field( + default=1, + ge=1, + description="Unit count", + alias="unitcount", + title="Unit Count", + ) # noinspection SpellCheckingInspection nominal_capacity: float = Field( @@ -80,6 +85,7 @@ class ClusterProperties(ItemProperties): ge=0, description="Nominal capacity (MW per unit)", alias="nominalcapacity", + title="Nominal Capacity", ) @property diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/ini_properties.py b/antarest/study/storage/rawstudy/model/filesystem/config/ini_properties.py index 8c113ce164..51f10a5ca5 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/ini_properties.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/ini_properties.py @@ -6,10 +6,10 @@ class IniProperties( BaseModel, - # On reading, if the configuration contains an extra field, it is ignored. - # This allows to read configurations that contain extra fields - # that are not yet managed by the code or that are deprecated. - extra=Extra.ignore, + # On reading, if the configuration contains an extra field, it is better + # to forbid it, because it allows errors to be detected early. + # Ignoring extra attributes can hide errors. + extra=Extra.forbid, # If a field is updated on assignment, it is also validated. validate_assignment=True, # On testing, we can use snake_case for field names. 
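Switching `IniProperties` from `Extra.ignore` to `Extra.forbid` means that a misspelled key in a configuration section now fails validation immediately instead of being silently dropped. A minimal sketch of the difference (pydantic v1 API, hypothetical model):

from pydantic import BaseModel, Extra, ValidationError


class ForbidProps(BaseModel, extra=Extra.forbid):
    hurdles_cost: bool = False


class IgnoreProps(BaseModel, extra=Extra.ignore):
    hurdles_cost: bool = False


# With Extra.ignore the misspelled key vanishes silently...
assert IgnoreProps(**{"hurdle_cost": True}).hurdles_cost is False

# ...with Extra.forbid the same payload raises immediately.
try:
    ForbidProps(**{"hurdle_cost": True})
except ValidationError as exc:
    print(exc.errors()[0]["msg"])  # "extra fields not permitted"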
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/links.py b/antarest/study/storage/rawstudy/model/filesystem/config/links.py index ddd5d9abb9..979c9a6d09 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/links.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/links.py @@ -1,6 +1,7 @@ """ Object model used to read and update link configuration. """ + import typing as t from pydantic import Field, root_validator, validator diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py b/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py index 57beb01b29..ed0716147a 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py @@ -6,16 +6,6 @@ from antarest.study.storage.rawstudy.model.filesystem.config.cluster import ClusterProperties from antarest.study.storage.rawstudy.model.filesystem.config.identifier import IgnoreCaseIdentifier -__all__ = ( - "RenewableClusterGroup", - "RenewableConfig", - "RenewableConfigType", - "RenewableProperties", - "TimeSeriesInterpretation", - "create_renewable_config", - "RenewableClusterGroup", -) - class TimeSeriesInterpretation(EnumIgnoreCase): """ @@ -74,11 +64,13 @@ class RenewableProperties(ClusterProperties): """ group: RenewableClusterGroup = Field( + title="Renewable Cluster Group", default=RenewableClusterGroup.OTHER1, description="Renewable Cluster Group", ) ts_interpretation: TimeSeriesInterpretation = Field( + title="Time Series Interpretation", default=TimeSeriesInterpretation.POWER_GENERATION, description="Time series interpretation", alias="ts-interpretation", @@ -106,6 +98,22 @@ class RenewableConfig(RenewableProperties, IgnoreCaseIdentifier): RenewableConfigType = RenewableConfig +def get_renewable_config_cls(study_version: t.Union[str, int]) -> t.Type[RenewableConfig]: + """ + Retrieves the renewable configuration class based on the study version. + + Args: + study_version: The version of the study. + + Returns: + The renewable configuration class. + """ + version = int(study_version) + if version >= 810: + return RenewableConfig + raise ValueError(f"Unsupported study version {study_version}, required 810 or above.") + + def create_renewable_config(study_version: t.Union[str, int], **kwargs: t.Any) -> RenewableConfigType: """ Factory method to create a renewable configuration model. @@ -120,4 +128,5 @@ def create_renewable_config(study_version: t.Union[str, int], **kwargs: t.Any) - Raises: ValueError: If the study version is not supported. 
""" - return RenewableConfig(**kwargs) + cls = get_renewable_config_cls(study_version) + return cls(**kwargs) diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py b/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py index 61a644b3a5..9d8dd72229 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py @@ -42,30 +42,35 @@ class STStorageProperties(ItemProperties): group: STStorageGroup = Field( STStorageGroup.OTHER1, description="Energy storage system group", + title="Short-Term Storage Group", ) injection_nominal_capacity: float = Field( 0, description="Injection nominal capacity (MW)", ge=0, alias="injectionnominalcapacity", + title="Injection Nominal Capacity", ) withdrawal_nominal_capacity: float = Field( 0, description="Withdrawal nominal capacity (MW)", ge=0, alias="withdrawalnominalcapacity", + title="Withdrawal Nominal Capacity", ) reservoir_capacity: float = Field( 0, description="Reservoir capacity (MWh)", ge=0, alias="reservoircapacity", + title="Reservoir Capacity", ) efficiency: float = Field( 1, description="Efficiency of the storage system (%)", ge=0, le=1, + title="Efficiency", ) # The `initial_level` value must be between 0 and 1, but the default value is 0.5 initial_level: float = Field( @@ -74,11 +79,13 @@ class STStorageProperties(ItemProperties): ge=0, le=1, alias="initiallevel", + title="Initial Level", ) initial_level_optim: bool = Field( False, description="Flag indicating if the initial level is optimized", alias="initialleveloptim", + title="Initial Level Optimization", ) @@ -142,6 +149,24 @@ class STStorage880Config(STStorage880Properties, LowerCaseIdentifier): STStorageConfigType = t.Union[STStorageConfig, STStorage880Config] +def get_st_storage_config_cls(study_version: t.Union[str, int]) -> t.Type[STStorageConfig]: + """ + Retrieves the short-term storage configuration class based on the study version. + + Args: + study_version: The version of the study. + + Returns: + The short-term storage configuration class. + """ + version = int(study_version) + if version >= 880: + return STStorage880Config + elif version >= 860: + return STStorageConfig + raise ValueError(f"Unsupported study version: {version}") + + def create_st_storage_config(study_version: t.Union[str, int], **kwargs: t.Any) -> STStorageConfigType: """ Factory method to create a short-term storage configuration model. @@ -156,9 +181,5 @@ def create_st_storage_config(study_version: t.Union[str, int], **kwargs: t.Any) Raises: ValueError: If the study version is not supported. 
""" - version = int(study_version) - if version < 860: - raise ValueError(f"Unsupported study version: {version}") - elif version < 880: - return STStorageConfig(**kwargs) - return STStorage880Config(**kwargs) + cls = get_st_storage_config_cls(study_version) + return cls(**kwargs) diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/thermal.py b/antarest/study/storage/rawstudy/model/filesystem/config/thermal.py index f2a810025a..dcd0bc7729 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/thermal.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/thermal.py @@ -6,20 +6,6 @@ from antarest.study.storage.rawstudy.model.filesystem.config.cluster import ClusterProperties from antarest.study.storage.rawstudy.model.filesystem.config.identifier import IgnoreCaseIdentifier -__all__ = ( - "LawOption", - "LocalTSGenerationBehavior", - "Thermal860Config", - "Thermal870Config", - "Thermal870Properties", - "ThermalClusterGroup", - "ThermalConfig", - "ThermalConfigType", - "ThermalCostGeneration", - "ThermalProperties", - "create_thermal_config", -) - class LocalTSGenerationBehavior(EnumIgnoreCase): """ @@ -108,17 +94,20 @@ class ThermalProperties(ClusterProperties): group: ThermalClusterGroup = Field( default=ThermalClusterGroup.OTHER1, description="Thermal Cluster Group", + title="Thermal Cluster Group", ) gen_ts: LocalTSGenerationBehavior = Field( default=LocalTSGenerationBehavior.USE_GLOBAL, description="Time Series Generation Option", alias="gen-ts", + title="Time Series Generation", ) min_stable_power: float = Field( default=0.0, description="Min. Stable Power (MW)", alias="min-stable-power", + title="Min. Stable Power", ) min_up_time: int = Field( default=1, @@ -126,6 +115,7 @@ class ThermalProperties(ClusterProperties): le=168, description="Min. Up time (h)", alias="min-up-time", + title="Min. Up Time", ) min_down_time: int = Field( default=1, @@ -133,17 +123,20 @@ class ThermalProperties(ClusterProperties): le=168, description="Min. Down time (h)", alias="min-down-time", + title="Min. 
Down Time", ) must_run: bool = Field( default=False, description="Must run flag", alias="must-run", + title="Must Run", ) spinning: float = Field( default=0.0, ge=0, le=100, description="Spinning (%)", + title="Spinning", ) volatility_forced: float = Field( default=0.0, @@ -151,6 +144,7 @@ class ThermalProperties(ClusterProperties): le=1, description="Forced Volatility", alias="volatility.forced", + title="Forced Volatility", ) volatility_planned: float = Field( default=0.0, @@ -158,51 +152,60 @@ class ThermalProperties(ClusterProperties): le=1, description="Planned volatility", alias="volatility.planned", + title="Planned Volatility", ) law_forced: LawOption = Field( default=LawOption.UNIFORM, description="Forced Law (ts-generator)", alias="law.forced", + title="Forced Law", ) law_planned: LawOption = Field( default=LawOption.UNIFORM, description="Planned Law (ts-generator)", alias="law.planned", + title="Planned Law", ) marginal_cost: float = Field( default=0.0, ge=0, description="Marginal cost (euros/MWh)", alias="marginal-cost", + title="Marginal Cost", ) spread_cost: float = Field( default=0.0, ge=0, description="Spread (euros/MWh)", alias="spread-cost", + title="Spread Cost", ) fixed_cost: float = Field( default=0.0, ge=0, description="Fixed cost (euros/hour)", alias="fixed-cost", + title="Fixed Cost", ) startup_cost: float = Field( default=0.0, ge=0, description="Startup cost (euros/startup)", alias="startup-cost", + title="Startup Cost", ) market_bid_cost: float = Field( default=0.0, ge=0, description="Market bid cost (euros/MWh)", alias="market-bid-cost", + title="Market Bid Cost", ) co2: float = Field( default=0.0, ge=0, description="Emission rate of CO2 (t/MWh)", + title="Emission rate of CO2", ) @@ -215,62 +218,74 @@ class Thermal860Properties(ThermalProperties): default=0.0, ge=0, description="Emission rate of NH3 (t/MWh)", + title="Emission rate of NH3", ) so2: float = Field( default=0.0, ge=0, description="Emission rate of SO2 (t/MWh)", + title="Emission rate of SO2", ) nox: float = Field( default=0.0, ge=0, description="Emission rate of NOX (t/MWh)", + title="Emission rate of NOX", ) pm2_5: float = Field( default=0.0, ge=0, description="Emission rate of PM 2.5 (t/MWh)", + title="Emission rate of PM 2.5", alias="pm2_5", ) pm5: float = Field( default=0.0, ge=0, description="Emission rate of PM 5 (t/MWh)", + title="Emission rate of PM 5", ) pm10: float = Field( default=0.0, ge=0, description="Emission rate of PM 10 (t/MWh)", + title="Emission rate of PM 10", ) nmvoc: float = Field( default=0.0, ge=0, description="Emission rate of NMVOC (t/MWh)", + title="Emission rate of NMVOC", ) op1: float = Field( default=0.0, ge=0, description="Emission rate of pollutant 1 (t/MWh)", + title="Emission rate of pollutant 1", ) op2: float = Field( default=0.0, ge=0, description="Emission rate of pollutant 2 (t/MWh)", + title="Emission rate of pollutant 2", ) op3: float = Field( default=0.0, ge=0, description="Emission rate of pollutant 3 (t/MWh)", + title="Emission rate of pollutant 3", ) op4: float = Field( default=0.0, ge=0, description="Emission rate of pollutant 4 (t/MWh)", + title="Emission rate of pollutant 4", ) op5: float = Field( default=0.0, ge=0, description="Emission rate of pollutant 5 (t/MWh)", + title="Emission rate of pollutant 5", ) @@ -284,18 +299,21 @@ class Thermal870Properties(Thermal860Properties): default=ThermalCostGeneration.SET_MANUALLY, alias="costgeneration", description="Cost generation option", + title="Cost Generation", ) efficiency: float = Field( default=100.0, 
         ge=0,
         le=100,
         description="Efficiency (%)",
+        title="Efficiency",
     )
     # Even if `variableomcost` is a cost it could be negative.
     variable_o_m_cost: float = Field(
         default=0.0,
         description="Operating and Maintenance Cost (€/MWh)",
         alias="variableomcost",
+        title="Variable O&M Cost",
     )
 
 
@@ -375,6 +393,25 @@ class Thermal870Config(Thermal870Properties, IgnoreCaseIdentifier):
 ThermalConfigType = t.Union[Thermal870Config, Thermal860Config, ThermalConfig]
 
 
+def get_thermal_config_cls(study_version: t.Union[str, int]) -> t.Type[ThermalConfigType]:
+    """
+    Retrieves the thermal configuration class based on the study version.
+
+    Args:
+        study_version: The version of the study.
+
+    Returns:
+        The thermal configuration class.
+    """
+    version = int(study_version)
+    if version >= 870:
+        return Thermal870Config
+    elif version == 860:
+        return Thermal860Config
+    else:
+        return ThermalConfig
+
+
 def create_thermal_config(study_version: t.Union[str, int], **kwargs: t.Any) -> ThermalConfigType:
     """
     Factory method to create a thermal configuration model.
@@ -389,10 +426,5 @@ def create_thermal_config(study_version: t.Union[str, int], **kwargs: t.Any) ->
     Raises:
         ValueError: If the study version is not supported.
     """
-    version = int(study_version)
-    if version >= 870:
-        return Thermal870Config(**kwargs)
-    elif version == 860:
-        return Thermal860Config(**kwargs)
-    else:
-        return ThermalConfig(**kwargs)
+    cls = get_thermal_config_cls(study_version)
+    return cls(**kwargs)
diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
index d66df2c970..15e5e72f09 100644
--- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
+++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
@@ -139,6 +139,25 @@ class OptionalProperties(BindingConstraintProperties870, metaclass=AllOptionalMe
 # =================================================================================
 
 
+BindingConstraintPropertiesType = t.Union[BindingConstraintProperties870, BindingConstraintProperties]
+
+
+def get_binding_constraint_config_cls(study_version: t.Union[str, int]) -> t.Type[BindingConstraintPropertiesType]:
+    """
+    Retrieves the binding constraint configuration class based on the study version.
+
+    Args:
+        study_version: The version of the study.
+
+    Returns:
+        The binding constraint configuration class.
+    """
+    version = int(study_version)
+    if version >= 870:
+        return BindingConstraintProperties870
+    return BindingConstraintProperties
+
+
 class BindingConstraintMatrices(BaseModel, extra=Extra.forbid, allow_population_by_field_name=True):
     """
     Class used to store the matrices of a binding constraint.
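The `get_*_config_cls` helpers added across these hunks (renewables, short-term storages, thermals, binding constraints) all follow the same version-dispatch pattern: pick the most specific properties/config class for the study version, then let the existing `create_*` factory validate the keyword arguments against it. A minimal, self-contained sketch of that pattern follows; the class names, fields and the 870 cutoff below are illustrative stand-ins, not the real antarest models.

import typing as t

from pydantic import BaseModel


class PropertiesV800(BaseModel):
    """Stand-in for a pre-8.7 properties model."""

    comments: str = ""


class PropertiesV870(PropertiesV800):
    """Stand-in for a v8.7+ properties model, which adds a `group` field."""

    group: str = "default"


def get_properties_cls(study_version: t.Union[str, int]) -> t.Type[PropertiesV800]:
    """Version dispatch: return the most specific class supported by the study version."""
    return PropertiesV870 if int(study_version) >= 870 else PropertiesV800


def create_properties(study_version: t.Union[str, int], **kwargs: t.Any) -> PropertiesV800:
    """Factory: validate the keyword arguments against the class selected for the version."""
    cls = get_properties_cls(study_version)
    return cls(**kwargs)


# create_properties("870", comments="BC") -> PropertiesV870(comments='BC', group='default')
# create_properties(800, comments="BC")   -> PropertiesV800(comments='BC'), no `group` field

Splitting the class lookup out of the factory also lets callers use the version-specific class directly, for example to produce a JSON schema, without building an instance.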
diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index 2a9777801d..6339033421 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -9,7 +9,7 @@ from antarest.core.config import Config from antarest.core.jwt import JWTUser -from antarest.core.model import StudyPermissionType +from antarest.core.model import JSON, StudyPermissionType from antarest.core.requests import RequestParameters from antarest.core.utils.utils import sanitize_uuid from antarest.core.utils.web import APITag @@ -861,6 +861,19 @@ def get_table_mode( table_data = study_service.table_mode_manager.get_table_data(study, table_type, column_list) return table_data + @bp.get( + path="/table-schema/{table_type}", + tags=[APITag.study_data], + summary="Get table schema", + ) + def get_table_schema( + table_type: TableTemplateType, + current_user: JWTUser = Depends(auth.get_current_user), + ) -> JSON: + logger.info("Getting table schema", extra={"user": current_user.id}) + model_schema = study_service.table_mode_manager.get_table_schema(table_type) + return model_schema + @bp.put( path="/studies/{uuid}/table-mode/{table_type}", tags=[APITag.study_data], @@ -878,7 +891,7 @@ def set_table_mode( ) params = RequestParameters(user=current_user) study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, params) - table_data = study_service.table_mode_manager.set_table_data(study, table_type, data) + table_data = study_service.table_mode_manager.update_table_data(study, table_type, data) return table_data @bp.post( diff --git a/tests/integration/study_data_blueprint/test_binding_constraints.py b/tests/integration/study_data_blueprint/test_binding_constraints.py index e07943573d..29394c3b2f 100644 --- a/tests/integration/study_data_blueprint/test_binding_constraints.py +++ b/tests/integration/study_data_blueprint/test_binding_constraints.py @@ -594,7 +594,7 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st # Delete a fake binding constraint res = client.delete(f"/v1/studies/{study_id}/bindingconstraints/fake_bc", headers=user_headers) assert res.status_code == 404, res.json() - assert res.json()["exception"] == "BindingConstraintNotFoundError" + assert res.json()["exception"] == "BindingConstraintNotFound" assert res.json()["description"] == "Binding constraint 'fake_bc' not found" # Add a group before v8.7 diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index bf45d3407f..d519693805 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -56,41 +56,58 @@ def test_lifecycle__nominal( # Table Mode - Area # ================= - # res = client.put( - # f"/v1/studies/{study_id}/table-mode/areas", - # headers=user_headers, - # json={ - # "de": { - # "averageUnsuppliedEnergyCost": 3456, - # "dispatchableHydroPower": False, - # "filterSynthesis": "daily, monthly", - # "filterYearByYear": "weekly, annual", - # }, - # "es": { - # "adequacyPatchMode": "inside", - # "spreadSpilledEnergyCost": None, # not changed - # }, - # }, - # ) - # assert res.status_code == 200, res.json() + # Get the schema of the areas table + res = client.get( + f"/v1/table-schema/areas", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + "adequacyPatchMode", + 
"averageSpilledEnergyCost", + "averageUnsuppliedEnergyCost", + "dispatchableHydroPower", + "filterSynthesis", + "filterYearByYear", + "nonDispatchablePower", + "otherDispatchablePower", + "spreadSpilledEnergyCost", + "spreadUnsuppliedEnergyCost", + } - res = client.get(f"/v1/studies/{study_id}/table-mode/areas", headers=user_headers) + res = client.put( + f"/v1/studies/{study_id}/table-mode/areas", + headers=user_headers, + json={ + "de": { + "averageUnsuppliedEnergyCost": 3456, + "dispatchableHydroPower": False, + "filterSynthesis": "daily, monthly", # not changed + "filterYearByYear": "weekly, annual", + }, + "es": { + "adequacyPatchMode": "inside", + "spreadSpilledEnergyCost": None, # not changed + }, + }, + ) assert res.status_code == 200, res.json() - expected = { + expected_areas = { "de": { "adequacyPatchMode": "outside", "averageSpilledEnergyCost": 0, - "averageUnsuppliedEnergyCost": 3000, - "dispatchableHydroPower": True, + "averageUnsuppliedEnergyCost": 3456, + "dispatchableHydroPower": False, "filterSynthesis": "daily, monthly", - "filterYearByYear": "hourly, weekly, annual", + "filterYearByYear": "weekly, annual", "nonDispatchablePower": True, "otherDispatchablePower": True, "spreadSpilledEnergyCost": 0, "spreadUnsuppliedEnergyCost": 0, }, "es": { - "adequacyPatchMode": "outside", + "adequacyPatchMode": "inside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3000, "dispatchableHydroPower": True, @@ -127,11 +144,37 @@ def test_lifecycle__nominal( }, } actual = res.json() - assert actual == expected + assert actual == expected_areas + + res = client.get(f"/v1/studies/{study_id}/table-mode/areas", headers=user_headers) + assert res.status_code == 200, res.json() + actual = res.json() + assert actual == expected_areas # Table Mode - Links # ================== + # Get the schema of the links table + res = client.get( + f"/v1/table-schema/links", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + "assetType", + "colorRgb", + "displayComments", + "filterSynthesis", + "filterYearByYear", + "hurdlesCost", + "linkStyle", + "linkWidth", + "loopFlow", + "transmissionCapacities", + "usePhaseShifter", + } + res = client.put( f"/v1/studies/{study_id}/table-mode/links", headers=user_headers, @@ -217,6 +260,53 @@ def test_lifecycle__nominal( # Table Mode - Thermal Clusters # ============================= + # Get the schema of the thermals table + res = client.get( + f"/v1/table-schema/thermals", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + "co2", + "costGeneration", + "efficiency", + "enabled", + "fixedCost", + "genTs", + "group", + "id", + "lawForced", + "lawPlanned", + "marginalCost", + "marketBidCost", + "minDownTime", + "minStablePower", + "minUpTime", + "mustRun", + "name", + "nh3", + "nmvoc", + "nominalCapacity", + "nox", + "op1", + "op2", + "op3", + "op4", + "op5", + "pm10", + "pm25", + "pm5", + "so2", + "spinning", + "spreadCost", + "startupCost", + "unitCount", + "variableOMCost", + "volatilityForced", + "volatilityPlanned", + } + res = client.put( f"/v1/studies/{study_id}/table-mode/thermals", headers=user_headers, @@ -238,82 +328,82 @@ def test_lifecycle__nominal( assert res.status_code == 200, res.json() expected_thermals = { "de / 01_solar": { - "co2": 0.0, + "co2": 0, "costGeneration": None, "efficiency": None, "enabled": True, - "fixedCost": 0.0, + "fixedCost": 0, "genTs": "use global", 
"group": "Other 2", "id": "01_solar", "lawForced": "uniform", "lawPlanned": "uniform", - "marginalCost": 10.0, - "marketBidCost": 10.0, + "marginalCost": 10, + "marketBidCost": 10, "minDownTime": 1, - "minStablePower": 0.0, + "minStablePower": 0, "minUpTime": 1, "mustRun": False, "name": "01_solar", - "nh3": 0.0, - "nmvoc": 0.0, - "nominalCapacity": 500000.0, - "nox": 0.0, - "op1": 0.0, - "op2": 0.0, - "op3": 0.0, - "op4": 0.0, - "op5": 0.0, - "pm10": 0.0, - "pm25": 0.0, - "pm5": 0.0, + "nh3": 0, + "nmvoc": 0, + "nominalCapacity": 500000, + "nox": 0, + "op1": 0, + "op2": 0, + "op3": 0, + "op4": 0, + "op5": 0, + "pm10": 0, + "pm25": 0, + "pm5": 0, "so2": 8.25, - "spinning": 0.0, - "spreadCost": 0.0, - "startupCost": 0.0, + "spinning": 0, + "spreadCost": 0, + "startupCost": 0, "unitCount": 17, "variableOMCost": None, - "volatilityForced": 0.0, - "volatilityPlanned": 0.0, + "volatilityForced": 0, + "volatilityPlanned": 0, }, "de / 02_wind_on": { - "co2": 123.0, + "co2": 123, "costGeneration": None, "efficiency": None, "enabled": True, - "fixedCost": 0.0, + "fixedCost": 0, "genTs": "use global", "group": "Nuclear", "id": "02_wind_on", "lawForced": "uniform", "lawPlanned": "uniform", - "marginalCost": 20.0, - "marketBidCost": 20.0, + "marginalCost": 20, + "marketBidCost": 20, "minDownTime": 1, - "minStablePower": 0.0, + "minStablePower": 0, "minUpTime": 1, "mustRun": False, "name": "02_wind_on", - "nh3": 0.0, - "nmvoc": 0.0, - "nominalCapacity": 314159.0, - "nox": 0.0, - "op1": 0.0, - "op2": 0.0, - "op3": 0.0, - "op4": 0.0, - "op5": 0.0, - "pm10": 0.0, - "pm25": 0.0, - "pm5": 0.0, - "so2": 0.0, - "spinning": 0.0, - "spreadCost": 0.0, - "startupCost": 0.0, + "nh3": 0, + "nmvoc": 0, + "nominalCapacity": 314159, + "nox": 0, + "op1": 0, + "op2": 0, + "op3": 0, + "op4": 0, + "op5": 0, + "pm10": 0, + "pm25": 0, + "pm5": 0, + "so2": 0, + "spinning": 0, + "spreadCost": 0, + "startupCost": 0, "unitCount": 15, "variableOMCost": None, - "volatilityForced": 0.0, - "volatilityPlanned": 0.0, + "volatilityForced": 0, + "volatilityPlanned": 0, }, } assert res.json()["de / 01_solar"] == expected_thermals["de / 01_solar"] @@ -326,42 +416,42 @@ def test_lifecycle__nominal( ) assert res.status_code == 200, res.json() expected = { - "de / 01_solar": {"group": "Other 2", "nominalCapacity": 500000.0, "so2": 8.25, "unitCount": 17}, - "de / 02_wind_on": {"group": "Nuclear", "nominalCapacity": 314159.0, "so2": 0.0, "unitCount": 15}, - "de / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "de / 04_res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "de / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "de / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "de / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "de / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "de / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "es / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "es / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "es / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "es / 04_res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "es / 
05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "es / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "es / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "es / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "es / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "fr / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "fr / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "fr / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "fr / 04_res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "fr / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "fr / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "fr / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "fr / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "fr / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "it / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "it / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "it / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "it / 04_res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "it / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "it / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "it / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "it / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, - "it / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000.0, "so2": 0.0, "unitCount": 1}, + "de / 01_solar": {"group": "Other 2", "nominalCapacity": 500000, "so2": 8.25, "unitCount": 17}, + "de / 02_wind_on": {"group": "Nuclear", "nominalCapacity": 314159, "so2": 0, "unitCount": 15}, + "de / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "de / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "de / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "de / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "de / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "de / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "de / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 
05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, } actual = res.json() assert actual == expected @@ -429,6 +519,34 @@ def test_lifecycle__nominal( ) res.raise_for_status() + # Get the schema of the renewables table + res = client.get( + f"/v1/table-schema/renewables", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + "enabled", + "group", + "id", + "name", + "nominalCapacity", + "tsInterpretation", + "unitCount", + } + + # Update some generators using the table mode + res = client.put( + f"/v1/studies/{study_id}/table-mode/renewables", + headers=user_headers, + json={ + "fr / Dieppe": {"enabled": False}, + "fr / La Rochelle": {"enabled": True, "nominalCapacity": 3.1, "unitCount": 2}, + "it / Pouilles": {"group": "Wind Onshore"}, + }, + ) + res = client.get( f"/v1/studies/{study_id}/table-mode/renewables", headers=user_headers, @@ -436,10 +554,10 @@ def test_lifecycle__nominal( ) assert res.status_code == 200, res.json() expected = { - "fr / Dieppe": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 8, "unitCount": 62}, - "fr / La Rochelle": {"enabled": True, "group": "Solar PV", "nominalCapacity": 2.1, "unitCount": 1}, + "fr / Dieppe": {"enabled": False, "group": "Wind Offshore", "nominalCapacity": 8, "unitCount": 62}, + "fr / La Rochelle": {"enabled": True, "group": "Solar PV", 
"nominalCapacity": 3.1, "unitCount": 2}, "fr / Oleron": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 15, "unitCount": 70}, - "it / Pouilles": {"enabled": False, "group": "Wind Offshore", "nominalCapacity": 11, "unitCount": 40}, + "it / Pouilles": {"enabled": False, "group": "Wind Onshore", "nominalCapacity": 11, "unitCount": 40}, "it / Sardaigne": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 12, "unitCount": 86}, "it / Sicile": {"enabled": True, "group": "Solar PV", "nominalCapacity": 1.8, "unitCount": 1}, } @@ -449,6 +567,25 @@ def test_lifecycle__nominal( # Table Mode - Short Term Storage # =============================== + # Get the schema of the short-term storages table + res = client.get( + f"/v1/table-schema/st-storages", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + "efficiency", + "group", + "id", + "initialLevel", + "initialLevelOptim", + "injectionNominalCapacity", + "name", + "reservoirCapacity", + "withdrawalNominalCapacity", + } + # Prepare data for short-term storage tests storage_by_country = { "fr": { @@ -499,8 +636,67 @@ def test_lifecycle__nominal( ) res.raise_for_status() + # Update some generators using the table mode + res = client.put( + f"/v1/studies/{study_id}/table-mode/st-storages", + headers=user_headers, + json={ + "fr / siemens": {"injectionNominalCapacity": 1550, "withdrawalNominalCapacity": 1550}, + "fr / tesla": {"efficiency": 0.75, "initialLevel": 0.89, "initialLevelOptim": False}, + "it / storage3": {"group": "Pondage"}, + }, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert actual == { + "fr / siemens": { + "efficiency": 1, + "group": "Battery", + "id": "siemens", + "initialLevel": 0.5, + "initialLevelOptim": False, + "injectionNominalCapacity": 1550, + "name": "Siemens", + "reservoirCapacity": 1500, + "withdrawalNominalCapacity": 1550, + }, + "fr / tesla": { + "efficiency": 0.75, + "group": "Battery", + "id": "tesla", + "initialLevel": 0.89, + "initialLevelOptim": False, + "injectionNominalCapacity": 1200, + "name": "Tesla", + "reservoirCapacity": 1200, + "withdrawalNominalCapacity": 1200, + }, + "it / storage3": { + "efficiency": 1, + "group": "Pondage", + "id": "storage3", + "initialLevel": 1, + "initialLevelOptim": False, + "injectionNominalCapacity": 1234, + "name": "storage3", + "reservoirCapacity": 1357, + "withdrawalNominalCapacity": 1020, + }, + "it / storage4": { + "efficiency": 1, + "group": "PSP_open", + "id": "storage4", + "initialLevel": 0.5, + "initialLevelOptim": True, + "injectionNominalCapacity": 567, + "name": "storage4", + "reservoirCapacity": 500, + "withdrawalNominalCapacity": 456, + }, + } + res = client.get( - f"/v1/studies/{study_id}/table-mode/storages", + f"/v1/studies/{study_id}/table-mode/st-storages", headers=user_headers, params={ "columns": ",".join( @@ -518,31 +714,31 @@ def test_lifecycle__nominal( expected = { "fr / siemens": { "group": "Battery", - "injectionNominalCapacity": 1500, + "injectionNominalCapacity": 1550, "reservoirCapacity": 1500, - "withdrawalNominalCapacity": 1500, "unknowColumn": None, + "withdrawalNominalCapacity": 1550, }, "fr / tesla": { "group": "Battery", "injectionNominalCapacity": 1200, "reservoirCapacity": 1200, - "withdrawalNominalCapacity": 1200, "unknowColumn": None, + "withdrawalNominalCapacity": 1200, }, "it / storage3": { - "group": "PSP_open", + "group": "Pondage", "injectionNominalCapacity": 1234, "reservoirCapacity": 1357, - 
"withdrawalNominalCapacity": 1020, "unknowColumn": None, + "withdrawalNominalCapacity": 1020, }, "it / storage4": { "group": "PSP_open", "injectionNominalCapacity": 567, "reservoirCapacity": 500, - "withdrawalNominalCapacity": 456, "unknowColumn": None, + "withdrawalNominalCapacity": 456, }, } actual = res.json() @@ -593,15 +789,65 @@ def test_lifecycle__nominal( ) assert res.status_code == 200, res.json() + # Get the schema of the binding constraints table res = client.get( - f"/v1/studies/{study_id}/table-mode/constraints", + f"/v1/table-schema/binding-constraints", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + "comments", + "enabled", + "filterSynthesis", + "filterYearByYear", + "group", + "id", + "name", + "operator", + "terms", + "timeStep", + } + + # Update some binding constraints using the table mode + res = client.put( + f"/v1/studies/{study_id}/table-mode/binding-constraints", + headers=user_headers, + json={ + "binding constraint 1": {"comments": "Hello World!", "enabled": True}, + "binding constraint 2": {"filterSynthesis": "hourly", "filterYearByYear": "hourly", "operator": "both"}, + }, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert actual == { + "binding constraint 1": { + "comments": "Hello World!", + "enabled": True, + "filterSynthesis": "", + "filterYearByYear": "", + "operator": "less", + "timeStep": "hourly", + }, + "binding constraint 2": { + "comments": "This is a binding constraint", + "enabled": False, + "filterSynthesis": "hourly", + "filterYearByYear": "hourly", + "operator": "both", + "timeStep": "daily", + }, + } + + res = client.get( + f"/v1/studies/{study_id}/table-mode/binding-constraints", headers=user_headers, params={"columns": ""}, ) assert res.status_code == 200, res.json() expected = { "binding constraint 1": { - "comments": "", + "comments": "Hello World!", "enabled": True, "filterSynthesis": "", "filterYearByYear": "", @@ -611,9 +857,9 @@ def test_lifecycle__nominal( "binding constraint 2": { "comments": "This is a binding constraint", "enabled": False, - "filterSynthesis": "hourly, daily, weekly", - "filterYearByYear": "", - "operator": "greater", + "filterSynthesis": "hourly", + "filterYearByYear": "hourly", + "operator": "both", "timeStep": "daily", }, } diff --git a/tests/storage/business/test_arealink_manager.py b/tests/storage/business/test_arealink_manager.py index 6d664d329b..af01133b8c 100644 --- a/tests/storage/business/test_arealink_manager.py +++ b/tests/storage/business/test_arealink_manager.py @@ -195,7 +195,7 @@ def test_area_crud(empty_study: FileStudy, matrix_service: SimpleMatrixService): }, { "target": "input/areas/test/ui/layerColor/0", - "data": "255 , 0 , 100", + "data": "255,0,100", }, ], ), diff --git a/tests/study/business/areas/test_st_storage_management.py b/tests/study/business/areas/test_st_storage_management.py index 1e97acf0d7..f6c221d58b 100644 --- a/tests/study/business/areas/test_st_storage_management.py +++ b/tests/study/business/areas/test_st_storage_management.py @@ -134,7 +134,10 @@ def test_get_all_storages__nominal_case( all_storages = manager.get_all_storages_props(study) # Check - actual = {area_id: [form.dict(by_alias=True) for form in forms] for area_id, forms in all_storages.items()} + actual = { + area_id: [form.dict(by_alias=True) for form in clusters_by_ids.values()] + for area_id, clusters_by_ids in all_storages.items() + } expected = { "west": [ { @@ -171,7 +174,6 @@ def 
test_get_all_storages__nominal_case( "initialLevelOptim": False, }, ], - "east": [], } assert actual == expected From 404622fd978e2c2aa8bf5ca69c68426ff4294a6a Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 23 Apr 2024 12:26:52 +0200 Subject: [PATCH 098/147] feat(tablemode,tablemode-ui): add missing columns in table mode Co-authored-by: Samir Kamal <1954121+skamril@users.noreply.github.com> --- antarest/study/business/area_management.py | 27 ++- .../study/business/table_mode_management.py | 77 +++++---- .../rawstudy/model/filesystem/config/links.py | 4 + antarest/study/web/study_data_blueprint.py | 8 +- .../study_data_blueprint/test_table_mode.py | 156 +++++++++++------- .../Configuration/AdequacyPatch/index.tsx | 2 +- .../explore/Configuration/index.tsx | 6 +- .../dialogs/CreateTemplateTableDialog.tsx | 2 +- webapp/src/components/common/TableMode.tsx | 4 +- .../api/studies/tableMode/constants.ts | 92 ++++++++--- .../services/api/studies/tableMode/index.ts | 21 +-- .../services/api/studies/tableMode/types.ts | 4 +- .../services/api/studies/tableMode/utils.ts | 11 -- 13 files changed, 245 insertions(+), 169 deletions(-) delete mode 100644 webapp/src/services/api/studies/tableMode/utils.ts diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py index d7909acb75..86cfeaddfd 100644 --- a/antarest/study/business/area_management.py +++ b/antarest/study/business/area_management.py @@ -130,12 +130,17 @@ class _BaseAreaDTO( Aggregates the fields of the `OptimizationProperties` and `AdequacyPathProperties` classes, but without the `UIProperties` fields. + Add the `color_rgb` field extracted from `/input/areas//ui.ini` information. + Add the fields extracted from the `/input/thermal/areas.ini` information: - `average_unsupplied_energy_cost` is extracted from `unserverd_energy_cost`, - `average_spilled_energy_cost` is extracted from `spilled_energy_cost`. """ + # Extra field which represents the Area color + color_rgb: str = Field("#E66C2C", description="color of the area in the map") + average_unsupplied_energy_cost: float = Field(0.0, description="average unserverd energy cost (€/MWh)") average_spilled_energy_cost: float = Field(0.0, description="average spilled energy cost (€/MWh)") @@ -166,6 +171,7 @@ def from_model( The `GetAreaDTO` object. """ obj = { + "color_rgb": area_folder.ui.style.color_rgb, "average_unsupplied_energy_cost": average_unsupplied_energy_cost, "average_spilled_energy_cost": average_spilled_energy_cost, **area_folder.optimization.filtering.dict(by_alias=False), @@ -190,12 +196,17 @@ def _to_adequacy_patch(self) -> AdequacyPathProperties: adequacy_path_section = AdequacyPathProperties.AdequacyPathSection(**obj) return AdequacyPathProperties(adequacy_patch=adequacy_path_section) + def _to_ui(self) -> UIProperties: + """Construct a partially initialized UI object with only the Area color.""" + style_section = AreaUI(color_rgb=self.color_rgb) + return UIProperties(style=style_section) + @property def area_folder(self) -> AreaFolder: area_folder = AreaFolder( optimization=self._to_optimization(), adequacy_patch=self._to_adequacy_patch(), - # ui properties are not included in the AreaFolder. 
+ ui=self._to_ui(), ) return area_folder @@ -312,7 +323,7 @@ def update_areas_props( command_context=command_context, ) ) - if old_area_folder.adequacy_patch != new_area_folder.adequacy_patch: + if old_area_folder.adequacy_patch != new_area_folder.adequacy_patch and new_area_folder.adequacy_patch: commands.append( UpdateConfig( target=f"input/areas/{area_id}/adequacy_patch", @@ -320,6 +331,18 @@ def update_areas_props( command_context=command_context, ) ) + if old_area_folder.ui != new_area_folder.ui: + # update only the color fields, one by one + _color_fields = {"color_r", "color_g", "color_b"} + _data = new_area_folder.ui.style.to_config() + for _color_field in _color_fields: + commands.append( + UpdateConfig( + target=f"input/areas/{area_id}/ui/ui/{_color_field}", + data=_data[_color_field], + command_context=command_context, + ) + ) if old_area.average_unsupplied_energy_cost != new_area.average_unsupplied_energy_cost: commands.append( UpdateConfig( diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index 3b8a514507..49cce6b187 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -5,10 +5,7 @@ from antarest.core.model import JSON from antarest.study.business.area_management import AreaManager, AreaOutput -from antarest.study.business.areas.renewable_management import ( - RenewableClusterInput, - RenewableManager, -) +from antarest.study.business.areas.renewable_management import RenewableClusterInput, RenewableManager from antarest.study.business.areas.st_storage_management import STStorageInput, STStorageManager from antarest.study.business.areas.thermal_management import ThermalClusterInput, ThermalManager from antarest.study.business.binding_constraint_management import BindingConstraintManager, ConstraintInput @@ -22,9 +19,9 @@ TableDataDTO = t.Mapping[_TableIndex, t.Mapping[_TableColumn, _CellValue]] -class TableTemplateType(EnumIgnoreCase): +class TableModeType(EnumIgnoreCase): """ - Table template types. + Table types. This enum is used to define the different types of tables that can be created by the user to leverage the editing capabilities of multiple objects at once. @@ -32,16 +29,16 @@ class TableTemplateType(EnumIgnoreCase): Attributes: AREA: Area table. LINK: Link table. - THERMAL_CLUSTER: Thermal clusters table. - RENEWABLE_CLUSTER: Renewable clusters table. + THERMAL: Thermal clusters table. + RENEWABLE: Renewable clusters table. ST_STORAGE: Short-Term Storages table. BINDING_CONSTRAINT: Binding constraints table. 
""" AREA = "areas" LINK = "links" - THERMAL_CLUSTER = "thermals" - RENEWABLE_CLUSTER = "renewables" + THERMAL = "thermals" + RENEWABLE = "renewables" # Avoid "storages" because we may have "lt-storages" (long-term storages) in the future ST_STORAGE = "st-storages" # Avoid "constraints" because we may have other kinds of constraints in the future @@ -68,39 +65,39 @@ def __init__( def get_table_data( self, study: RawStudy, - table_type: TableTemplateType, + table_type: TableModeType, columns: t.Sequence[_TableColumn], ) -> TableDataDTO: - if table_type == TableTemplateType.AREA: + if table_type == TableModeType.AREA: areas_map = self._area_manager.get_all_area_props(study) data = {area_id: area.dict(by_alias=True) for area_id, area in areas_map.items()} - elif table_type == TableTemplateType.LINK: + elif table_type == TableModeType.LINK: links_map = self._link_manager.get_all_links_props(study) data = { f"{area1_id} / {area2_id}": link.dict(by_alias=True) for (area1_id, area2_id), link in links_map.items() } - elif table_type == TableTemplateType.THERMAL_CLUSTER: + elif table_type == TableModeType.THERMAL: thermals_by_areas = self._thermal_manager.get_all_thermals_props(study) data = { - f"{area_id} / {storage_id}": storage.dict(by_alias=True) + f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) for area_id, thermals_by_ids in thermals_by_areas.items() - for storage_id, storage in thermals_by_ids.items() + for cluster_id, cluster in thermals_by_ids.items() } - elif table_type == TableTemplateType.RENEWABLE_CLUSTER: + elif table_type == TableModeType.RENEWABLE: renewables_by_areas = self._renewable_manager.get_all_renewables_props(study) data = { - f"{area_id} / {storage_id}": storage.dict(by_alias=True) + f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) for area_id, renewables_by_ids in renewables_by_areas.items() - for storage_id, storage in renewables_by_ids.items() + for cluster_id, cluster in renewables_by_ids.items() } - elif table_type == TableTemplateType.ST_STORAGE: + elif table_type == TableModeType.ST_STORAGE: storages_by_areas = self._st_storage_manager.get_all_storages_props(study) data = { - f"{area_id} / {storage_id}": storage.dict(by_alias=True) + f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) for area_id, storages_by_ids in storages_by_areas.items() - for storage_id, storage in storages_by_ids.items() + for cluster_id, cluster in storages_by_ids.items() } - elif table_type == TableTemplateType.BINDING_CONSTRAINT: + elif table_type == TableModeType.BINDING_CONSTRAINT: bc_seq = self._binding_constraint_manager.get_binding_constraints(study) data = {bc.id: bc.dict(by_alias=True, exclude={"id", "name", "terms"}) for bc in bc_seq} else: # pragma: no cover @@ -158,16 +155,16 @@ def get_column_value(col: str, data: t.Dict[str, t.Any]) -> t.Any: def update_table_data( self, study: RawStudy, - table_type: TableTemplateType, + table_type: TableModeType, data: TableDataDTO, ) -> TableDataDTO: - if table_type == TableTemplateType.AREA: + if table_type == TableModeType.AREA: # Use AreaOutput to update properties of areas area_props_by_ids = {key: AreaOutput(**values) for key, values in data.items()} areas_map = self._area_manager.update_areas_props(study, area_props_by_ids) data = {area_id: area.dict(by_alias=True) for area_id, area in areas_map.items()} return data - elif table_type == TableTemplateType.LINK: + elif table_type == TableModeType.LINK: links_map = {tuple(key.split(" / ")): 
LinkOutput(**values) for key, values in data.items()} updated_map = self._link_manager.update_links_props(study, links_map) # type: ignore data = { @@ -175,7 +172,7 @@ def update_table_data( for (area1_id, area2_id), link in updated_map.items() } return data - elif table_type == TableTemplateType.THERMAL_CLUSTER: + elif table_type == TableModeType.THERMAL: thermals_by_areas: t.MutableMapping[str, t.MutableMapping[str, ThermalClusterInput]] thermals_by_areas = collections.defaultdict(dict) for key, values in data.items(): @@ -183,12 +180,12 @@ def update_table_data( thermals_by_areas[area_id][cluster_id] = ThermalClusterInput(**values) thermals_map = self._thermal_manager.update_thermals_props(study, thermals_by_areas) data = { - f"{area_id} / {cluster_id}": cluster.dict(by_alias=True) + f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) for area_id, thermals_by_ids in thermals_map.items() for cluster_id, cluster in thermals_by_ids.items() } return data - elif table_type == TableTemplateType.RENEWABLE_CLUSTER: + elif table_type == TableModeType.RENEWABLE: renewables_by_areas: t.MutableMapping[str, t.MutableMapping[str, RenewableClusterInput]] renewables_by_areas = collections.defaultdict(dict) for key, values in data.items(): @@ -196,12 +193,12 @@ def update_table_data( renewables_by_areas[area_id][cluster_id] = RenewableClusterInput(**values) renewables_map = self._renewable_manager.update_renewables_props(study, renewables_by_areas) data = { - f"{area_id} / {cluster_id}": cluster.dict(by_alias=True) + f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) for area_id, renewables_by_ids in renewables_map.items() for cluster_id, cluster in renewables_by_ids.items() } return data - elif table_type == TableTemplateType.ST_STORAGE: + elif table_type == TableModeType.ST_STORAGE: storages_by_areas: t.MutableMapping[str, t.MutableMapping[str, STStorageInput]] storages_by_areas = collections.defaultdict(dict) for key, values in data.items(): @@ -209,19 +206,19 @@ def update_table_data( storages_by_areas[area_id][cluster_id] = STStorageInput(**values) storages_map = self._st_storage_manager.update_storages_props(study, storages_by_areas) data = { - f"{area_id} / {cluster_id}": cluster.dict(by_alias=True) + f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) for area_id, storages_by_ids in storages_map.items() for cluster_id, cluster in storages_by_ids.items() } return data - elif table_type == TableTemplateType.BINDING_CONSTRAINT: + elif table_type == TableModeType.BINDING_CONSTRAINT: bcs_by_ids = {key: ConstraintInput(**values) for key, values in data.items()} bcs_map = self._binding_constraint_manager.update_binding_constraints(study, bcs_by_ids) return {bc_id: bc.dict(by_alias=True, exclude={"id", "name", "terms"}) for bc_id, bc in bcs_map.items()} else: # pragma: no cover raise NotImplementedError(f"Table type {table_type} not implemented") - def get_table_schema(self, table_type: TableTemplateType) -> JSON: + def get_table_schema(self, table_type: TableModeType) -> JSON: """ Get the properties of the table columns which type is provided as a parameter. @@ -231,17 +228,17 @@ def get_table_schema(self, table_type: TableTemplateType) -> JSON: Returns: JSON Schema which allows to know the name, title and type of each column. 
""" - if table_type == TableTemplateType.AREA: + if table_type == TableModeType.AREA: return self._area_manager.get_table_schema() - elif table_type == TableTemplateType.LINK: + elif table_type == TableModeType.LINK: return self._link_manager.get_table_schema() - elif table_type == TableTemplateType.THERMAL_CLUSTER: + elif table_type == TableModeType.THERMAL: return self._thermal_manager.get_table_schema() - elif table_type == TableTemplateType.RENEWABLE_CLUSTER: + elif table_type == TableModeType.RENEWABLE: return self._renewable_manager.get_table_schema() - elif table_type == TableTemplateType.ST_STORAGE: + elif table_type == TableModeType.ST_STORAGE: return self._st_storage_manager.get_table_schema() - elif table_type == TableTemplateType.BINDING_CONSTRAINT: + elif table_type == TableModeType.BINDING_CONSTRAINT: return self._binding_constraint_manager.get_table_schema() else: # pragma: no cover raise NotImplementedError(f"Table type {table_type} not implemented") diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/links.py b/antarest/study/storage/rawstudy/model/filesystem/config/links.py index 979c9a6d09..664941ed96 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/links.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/links.py @@ -76,6 +76,7 @@ class LinkProperties(IniProperties): ... "colorr": "80", ... "colorg": "192", ... "colorb": "255", + ... "comments": "This is a link", ... "display-comments": "true", ... "filter-synthesis": "hourly, daily, weekly, monthly, annual", ... "filter-year-by-year": "hourly, daily, weekly, monthly, annual", @@ -86,6 +87,7 @@ class LinkProperties(IniProperties): >>> pprint(opt.dict(by_alias=True), width=80) {'asset-type': , 'colorRgb': '#50C0FF', + 'comments': 'This is a link', 'display-comments': True, 'filter-synthesis': 'hourly, daily, weekly, monthly, annual', 'filter-year-by-year': 'hourly, daily, weekly, monthly, annual', @@ -101,6 +103,7 @@ class LinkProperties(IniProperties): 'colorb': 255, 'colorg': 192, 'colorr': 80, + 'comments': 'This is a link', 'display-comments': True, 'filter-synthesis': 'hourly, daily, weekly, monthly, annual', 'filter-year-by-year': 'hourly, daily, weekly, monthly, annual', @@ -121,6 +124,7 @@ class LinkProperties(IniProperties): asset_type: AssetType = Field(default=AssetType.AC, alias="asset-type") link_style: str = Field(default="plain", alias="link-style") link_width: int = Field(default=1, alias="link-width") + comments: str = Field(default="", alias="comments") # unknown field?! 
display_comments: bool = Field(default=True, alias="display-comments") filter_synthesis: str = Field(default="", alias="filter-synthesis") filter_year_by_year: str = Field(default="", alias="filter-year-by-year") diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index 6339033421..62ad33d970 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -54,7 +54,7 @@ from antarest.study.business.link_management import LinkInfoDTO from antarest.study.business.optimization_management import OptimizationFormFields from antarest.study.business.playlist_management import PlaylistColumns -from antarest.study.business.table_mode_management import TableDataDTO, TableTemplateType +from antarest.study.business.table_mode_management import TableDataDTO, TableModeType from antarest.study.business.thematic_trimming_field_infos import ThematicTrimmingFormFields from antarest.study.business.timeseries_config_management import TSFormFields from antarest.study.model import PatchArea, PatchCluster @@ -847,7 +847,7 @@ def set_timeseries_form_values( ) def get_table_mode( uuid: str, - table_type: TableTemplateType, + table_type: TableModeType, columns: str = "", current_user: JWTUser = Depends(auth.get_current_user), ) -> TableDataDTO: @@ -867,7 +867,7 @@ def get_table_mode( summary="Get table schema", ) def get_table_schema( - table_type: TableTemplateType, + table_type: TableModeType, current_user: JWTUser = Depends(auth.get_current_user), ) -> JSON: logger.info("Getting table schema", extra={"user": current_user.id}) @@ -881,7 +881,7 @@ def get_table_schema( ) def set_table_mode( uuid: str, - table_type: TableTemplateType, + table_type: TableModeType, data: TableDataDTO, current_user: JWTUser = Depends(auth.get_current_user), ) -> TableDataDTO: diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index d519693805..8bebb875fa 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -64,16 +64,21 @@ def test_lifecycle__nominal( assert res.status_code == 200, res.json() actual = res.json() assert set(actual["properties"]) == { - "adequacyPatchMode", - "averageSpilledEnergyCost", - "averageUnsuppliedEnergyCost", - "dispatchableHydroPower", - "filterSynthesis", - "filterYearByYear", + # UI + "colorRgb", + # Optimization - Nodal optimization "nonDispatchablePower", + "dispatchableHydroPower", "otherDispatchablePower", - "spreadSpilledEnergyCost", + "averageUnsuppliedEnergyCost", "spreadUnsuppliedEnergyCost", + "averageSpilledEnergyCost", + "spreadSpilledEnergyCost", + # Optimization - Filtering + "filterSynthesis", + "filterYearByYear", + # Adequacy patch + "adequacyPatchMode", } res = client.put( @@ -90,6 +95,9 @@ def test_lifecycle__nominal( "adequacyPatchMode": "inside", "spreadSpilledEnergyCost": None, # not changed }, + "fr": { + "colorRgb": "#C00000", + }, }, ) assert res.status_code == 200, res.json() @@ -98,6 +106,7 @@ def test_lifecycle__nominal( "adequacyPatchMode": "outside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3456, + "colorRgb": "#0080FF", "dispatchableHydroPower": False, "filterSynthesis": "daily, monthly", "filterYearByYear": "weekly, annual", @@ -110,6 +119,7 @@ def test_lifecycle__nominal( "adequacyPatchMode": "inside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3000, + "colorRgb": "#0080FF", 
"dispatchableHydroPower": True, "filterSynthesis": "daily, monthly", "filterYearByYear": "hourly, weekly, annual", @@ -122,6 +132,7 @@ def test_lifecycle__nominal( "adequacyPatchMode": "outside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3000, + "colorRgb": "#C00000", "dispatchableHydroPower": True, "filterSynthesis": "", "filterYearByYear": "hourly", @@ -134,6 +145,7 @@ def test_lifecycle__nominal( "adequacyPatchMode": "outside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3000, + "colorRgb": "#0080FF", "dispatchableHydroPower": True, "filterSynthesis": "", "filterYearByYear": "hourly", @@ -162,17 +174,18 @@ def test_lifecycle__nominal( assert res.status_code == 200, res.json() actual = res.json() assert set(actual["properties"]) == { - "assetType", "colorRgb", - "displayComments", - "filterSynthesis", - "filterYearByYear", + "comments", "hurdlesCost", - "linkStyle", - "linkWidth", "loopFlow", - "transmissionCapacities", "usePhaseShifter", + "transmissionCapacities", + "assetType", + "linkStyle", + "linkWidth", + "displayComments", + "filterSynthesis", + "filterYearByYear", } res = client.put( @@ -203,6 +216,7 @@ def test_lifecycle__nominal( "usePhaseShifter": True, }, "fr / it": { + "comments": "Link from France to Italie", "assetType": "DC", # case-insensitive }, }, @@ -212,6 +226,7 @@ def test_lifecycle__nominal( "de / fr": { "assetType": "ac", "colorRgb": "#FFA500", + "comments": "", "displayComments": False, "filterSynthesis": "hourly, daily, weekly, annual", "filterYearByYear": "hourly, daily, monthly, annual", @@ -225,6 +240,7 @@ def test_lifecycle__nominal( "es / fr": { "assetType": "ac", "colorRgb": "#FF6347", + "comments": "", "displayComments": True, "filterSynthesis": "hourly, daily, weekly, monthly, annual", "filterYearByYear": "hourly, daily, weekly, annual", @@ -238,6 +254,7 @@ def test_lifecycle__nominal( "fr / it": { "assetType": "dc", "colorRgb": "#707070", + "comments": "Link from France to Italie", "displayComments": True, "filterSynthesis": "", "filterYearByYear": "hourly", @@ -268,43 +285,47 @@ def test_lifecycle__nominal( assert res.status_code == 200, res.json() actual = res.json() assert set(actual["properties"]) == { - "co2", - "costGeneration", - "efficiency", + # read-only fields + "id", + "name", + # Thermals fields + "group", "enabled", - "fixedCost", + "unitCount", + "nominalCapacity", "genTs", - "group", - "id", + "minStablePower", + "minUpTime", + "minDownTime", + "mustRun", + "spinning", + "volatilityForced", + "volatilityPlanned", "lawForced", "lawPlanned", "marginalCost", + "spreadCost", + "fixedCost", + "startupCost", "marketBidCost", - "minDownTime", - "minStablePower", - "minUpTime", - "mustRun", - "name", + # pollutants - since v8.6 (except for "co2") + "co2", "nh3", - "nmvoc", - "nominalCapacity", + "so2", "nox", + "pm25", + "pm5", + "pm10", + "nmvoc", "op1", "op2", "op3", "op4", "op5", - "pm10", - "pm25", - "pm5", - "so2", - "spinning", - "spreadCost", - "startupCost", - "unitCount", + # since v8.7 + "costGeneration", + "efficiency", "variableOMCost", - "volatilityForced", - "volatilityPlanned", } res = client.put( @@ -328,6 +349,8 @@ def test_lifecycle__nominal( assert res.status_code == 200, res.json() expected_thermals = { "de / 01_solar": { + # "id": "01_solar", + # "name": "01_solar", "co2": 0, "costGeneration": None, "efficiency": None, @@ -335,7 +358,6 @@ def test_lifecycle__nominal( "fixedCost": 0, "genTs": "use global", "group": "Other 2", - "id": "01_solar", "lawForced": "uniform", "lawPlanned": 
"uniform", "marginalCost": 10, @@ -344,7 +366,6 @@ def test_lifecycle__nominal( "minStablePower": 0, "minUpTime": 1, "mustRun": False, - "name": "01_solar", "nh3": 0, "nmvoc": 0, "nominalCapacity": 500000, @@ -367,6 +388,8 @@ def test_lifecycle__nominal( "volatilityPlanned": 0, }, "de / 02_wind_on": { + # "id": "02_wind_on", + # "name": "02_wind_on", "co2": 123, "costGeneration": None, "efficiency": None, @@ -374,7 +397,6 @@ def test_lifecycle__nominal( "fixedCost": 0, "genTs": "use global", "group": "Nuclear", - "id": "02_wind_on", "lawForced": "uniform", "lawPlanned": "uniform", "marginalCost": 20, @@ -383,7 +405,6 @@ def test_lifecycle__nominal( "minStablePower": 0, "minUpTime": 1, "mustRun": False, - "name": "02_wind_on", "nh3": 0, "nmvoc": 0, "nominalCapacity": 314159, @@ -527,13 +548,15 @@ def test_lifecycle__nominal( assert res.status_code == 200, res.json() actual = res.json() assert set(actual["properties"]) == { - "enabled", - "group", + # read-only fields "id", "name", - "nominalCapacity", + # Renewables fields + "group", "tsInterpretation", + "enabled", "unitCount", + "nominalCapacity", } # Update some generators using the table mode @@ -575,15 +598,17 @@ def test_lifecycle__nominal( assert res.status_code == 200, res.json() actual = res.json() assert set(actual["properties"]) == { - "efficiency", - "group", + # read-only fields "id", - "initialLevel", - "initialLevelOptim", - "injectionNominalCapacity", "name", - "reservoirCapacity", + # Short-term storage fields + "group", + "injectionNominalCapacity", "withdrawalNominalCapacity", + "reservoirCapacity", + "efficiency", + "initialLevel", + "initialLevelOptim", } # Prepare data for short-term storage tests @@ -650,46 +675,46 @@ def test_lifecycle__nominal( actual = res.json() assert actual == { "fr / siemens": { + # "id": "siemens", + # "name": "Siemens", "efficiency": 1, "group": "Battery", - "id": "siemens", "initialLevel": 0.5, "initialLevelOptim": False, "injectionNominalCapacity": 1550, - "name": "Siemens", "reservoirCapacity": 1500, "withdrawalNominalCapacity": 1550, }, "fr / tesla": { + # "id": "tesla", + # "name": "Tesla", "efficiency": 0.75, "group": "Battery", - "id": "tesla", "initialLevel": 0.89, "initialLevelOptim": False, "injectionNominalCapacity": 1200, - "name": "Tesla", "reservoirCapacity": 1200, "withdrawalNominalCapacity": 1200, }, "it / storage3": { + # "id": "storage3", + # "name": "storage3", "efficiency": 1, "group": "Pondage", - "id": "storage3", "initialLevel": 1, "initialLevelOptim": False, "injectionNominalCapacity": 1234, - "name": "storage3", "reservoirCapacity": 1357, "withdrawalNominalCapacity": 1020, }, "it / storage4": { + # "id": "storage4", + # "name": "storage4", "efficiency": 1, "group": "PSP_open", - "id": "storage4", "initialLevel": 0.5, "initialLevelOptim": True, "injectionNominalCapacity": 567, - "name": "storage4", "reservoirCapacity": 500, "withdrawalNominalCapacity": 456, }, @@ -797,16 +822,19 @@ def test_lifecycle__nominal( assert res.status_code == 200, res.json() actual = res.json() assert set(actual["properties"]) == { - "comments", - "enabled", - "filterSynthesis", - "filterYearByYear", - "group", + # read-only fields "id", "name", + # Binding Constraints fields + "group", + "enabled", + "timeStep", "operator", + "comments", + "filterSynthesis", + "filterYearByYear", + # Binding Constraints - Terms "terms", - "timeStep", } # Update some binding constraints using the table mode diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/AdequacyPatch/index.tsx 
b/webapp/src/components/App/Singlestudy/explore/Configuration/AdequacyPatch/index.tsx index 9e301154bd..9793e24395 100644 --- a/webapp/src/components/App/Singlestudy/explore/Configuration/AdequacyPatch/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Configuration/AdequacyPatch/index.tsx @@ -53,7 +53,7 @@ function AdequacyPatch() { content: ( ), diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx b/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx index eb9550e2bb..cfc8f21993 100644 --- a/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx @@ -63,7 +63,7 @@ function Configuration() { () => ( ( ), @@ -91,7 +91,7 @@ function Configuration() { () => ( ), diff --git a/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/CreateTemplateTableDialog.tsx b/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/CreateTemplateTableDialog.tsx index 24f7accc77..9e5c223f6f 100644 --- a/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/CreateTemplateTableDialog.tsx +++ b/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/CreateTemplateTableDialog.tsx @@ -43,7 +43,7 @@ function CreateTemplateTableDialog(props: Props) { config={{ defaultValues: { name: "", - type: "area", + type: "areas", columns: [], }, }} diff --git a/webapp/src/components/common/TableMode.tsx b/webapp/src/components/common/TableMode.tsx index e92ff18011..fa34e1bf64 100644 --- a/webapp/src/components/common/TableMode.tsx +++ b/webapp/src/components/common/TableMode.tsx @@ -23,7 +23,7 @@ function TableMode(props: TableModeProps) { const { studyId, type, columns } = props; const res = usePromise( - () => getTableMode({ studyId, type, columns }), + () => getTableMode({ studyId, tableType: type, columns }), [studyId, type, JSON.stringify(columns)], ); @@ -32,7 +32,7 @@ function TableMode(props: TableModeProps) { //////////////////////////////////////////////////////////////// const handleSubmit = (data: SubmitHandlerPlus) => { - return setTableMode({ studyId, type, data: data.dirtyValues }); + return setTableMode({ studyId, tableType: type, data: data.dirtyValues }); }; //////////////////////////////////////////////////////////////// diff --git a/webapp/src/services/api/studies/tableMode/constants.ts b/webapp/src/services/api/studies/tableMode/constants.ts index 70526c7484..22c19310f1 100644 --- a/webapp/src/services/api/studies/tableMode/constants.ts +++ b/webapp/src/services/api/studies/tableMode/constants.ts @@ -1,19 +1,22 @@ -const AREA = "area"; -const LINK = "link"; -const CLUSTER = "cluster"; -const RENEWABLE = "renewable"; -const BINDING_CONSTRAINT = "binding constraint"; +const AREAS = "areas"; +const LINKS = "links"; +const THERMALS = "thermals"; +const RENEWABLES = "renewables"; +const ST_STORAGES = "st-storages"; +const BINDING_CONSTRAINTS = "binding-constraints"; export const TABLE_MODE_TYPES = [ - AREA, - LINK, - CLUSTER, - RENEWABLE, - BINDING_CONSTRAINT, + AREAS, + LINKS, + THERMALS, + RENEWABLES, + BINDING_CONSTRAINTS, ] as const; export const TABLE_MODE_COLUMNS_BY_TYPE = { - [AREA]: [ + [AREAS]: [ + // UI + "colorRgb", // Optimization - Nodal optimization "nonDispatchablePower", "dispatchableHydroPower", @@ -28,7 +31,7 @@ export const TABLE_MODE_COLUMNS_BY_TYPE = { // Adequacy patch "adequacyPatchMode", ], - [LINK]: [ + [LINKS]: [ "hurdlesCost", "loopFlow", "usePhaseShifter", @@ -36,38 +39,77 @@ export 
const TABLE_MODE_COLUMNS_BY_TYPE = { "assetType", "linkStyle", "linkWidth", + "comments", // unknown field?! "displayComments", + // Optimization - Filtering "filterSynthesis", "filterYearByYear", ], - [CLUSTER]: [ + [THERMALS]: [ + // "name" is read-only "group", "enabled", - "mustRun", "unitCount", "nominalCapacity", + "genTs", "minStablePower", - "spinning", "minUpTime", "minDownTime", - "co2", - "marginalCost", - "fixedCost", - "startupCost", - "marketBidCost", - "spreadCost", - "tsGen", + "mustRun", + "spinning", "volatilityForced", "volatilityPlanned", "lawForced", "lawPlanned", + "marginalCost", + "spreadCost", + "fixedCost", + "startupCost", + "marketBidCost", + // Pollutants - since v8.6 (except for "co2") + "co2", + "nh3", + "so2", + "nox", + "pm25", + "pm5", + "pm10", + "nmvoc", + "op1", + "op2", + "op3", + "op4", + "op5", + // Since v8.7 + "costGeneration", + "efficiency", + "variableOMCost", ], - [RENEWABLE]: [ + [RENEWABLES]: [ "group", - "tsInterpretation", "enabled", + "tsInterpretation", "unitCount", "nominalCapacity", ], - [BINDING_CONSTRAINT]: ["type", "operator", "enabled"], + [ST_STORAGES]: [ + "group", + // "enabled", // since v8.8 + "injectionNominalCapacity", + "withdrawalNominalCapacity", + "reservoirCapacity", + "efficiency", + "initialLevel", + "initialLevelOptim", + ], + [BINDING_CONSTRAINTS]: [ + "group", + "enabled", + "timeStep", + "operator", + "comments", + // Optimization - Filtering + "filterSynthesis", + "filterYearByYear", + ], } as const; diff --git a/webapp/src/services/api/studies/tableMode/index.ts b/webapp/src/services/api/studies/tableMode/index.ts index 68cd8cc895..03915b259e 100644 --- a/webapp/src/services/api/studies/tableMode/index.ts +++ b/webapp/src/services/api/studies/tableMode/index.ts @@ -6,31 +6,24 @@ import type { TableData, TableModeType, } from "./types"; -import { toColumnApiName } from "./utils"; -const TABLE_MODE_API_URL = `v1/studies/{studyId}/tablemode`; +const TABLE_MODE_API_URL = `v1/studies/{studyId}/table-mode/{tableType}`; export async function getTableMode( params: GetTableModeParams, ) { - const { studyId, type, columns } = params; - const url = format(TABLE_MODE_API_URL, { studyId }); + const { studyId, tableType, columns } = params; + const url = format(TABLE_MODE_API_URL, { studyId, tableType }); const res = await client.get(url, { - params: { - table_type: type, - columns: columns.map(toColumnApiName).join(","), - }, + params: columns.length > 0 ? 
{ columns: columns.join(",") } : {}, }); return res.data; } export async function setTableMode(params: SetTableModeParams) { - const { studyId, type, data } = params; - const url = format(TABLE_MODE_API_URL, { studyId }); - - await client.put(url, data, { - params: { table_type: type }, - }); + const { studyId, tableType, data } = params; + const url = format(TABLE_MODE_API_URL, { studyId, tableType }); + await client.put(url, data); } diff --git a/webapp/src/services/api/studies/tableMode/types.ts b/webapp/src/services/api/studies/tableMode/types.ts index def8344b9e..e20a167e27 100644 --- a/webapp/src/services/api/studies/tableMode/types.ts +++ b/webapp/src/services/api/studies/tableMode/types.ts @@ -15,12 +15,12 @@ export type TableData = Record< export interface GetTableModeParams { studyId: StudyMetadata["id"]; - type: T; + tableType: T; columns: TableModeColumnsForType; } export interface SetTableModeParams { studyId: StudyMetadata["id"]; - type: TableModeType; + tableType: TableModeType; data: DeepPartial; } diff --git a/webapp/src/services/api/studies/tableMode/utils.ts b/webapp/src/services/api/studies/tableMode/utils.ts deleted file mode 100644 index 35ccd7c8a3..0000000000 --- a/webapp/src/services/api/studies/tableMode/utils.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { snakeCase } from "lodash"; -import { TableModeColumnsForType, TableModeType } from "./types"; - -export function toColumnApiName( - column: TableModeColumnsForType[number], -) { - if (column === "co2") { - return "co2"; - } - return snakeCase(column); -} From 4b94eb220a885b0b6a095935374c5bc2bd0d3f47 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Sun, 7 Apr 2024 14:43:12 +0200 Subject: [PATCH 099/147] fix(tablemode): avoid raising 404 Not Found if table is missing (i.e. 
renewables) --- .../study/business/table_mode_management.py | 26 +++++++++++++------ 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index 49cce6b187..c513d1a5c2 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -12,6 +12,7 @@ from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.link_management import LinkManager, LinkOutput from antarest.study.model import RawStudy +from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError _TableIndex = str # row name _TableColumn = str # column name @@ -62,12 +63,7 @@ def __init__( self._st_storage_manager = st_storage_manager self._binding_constraint_manager = binding_constraint_manager - def get_table_data( - self, - study: RawStudy, - table_type: TableModeType, - columns: t.Sequence[_TableColumn], - ) -> TableDataDTO: + def _get_table_data_unsafe(self, study, table_type): if table_type == TableModeType.AREA: areas_map = self._area_manager.get_all_area_props(study) data = {area_id: area.dict(by_alias=True) for area_id, area in areas_map.items()} @@ -102,14 +98,28 @@ def get_table_data( data = {bc.id: bc.dict(by_alias=True, exclude={"id", "name", "terms"}) for bc in bc_seq} else: # pragma: no cover raise NotImplementedError(f"Table type {table_type} not implemented") + return data + + def get_table_data( + self, + study: RawStudy, + table_type: TableModeType, + columns: t.Sequence[_TableColumn], + ) -> TableDataDTO: + try: + data = self._get_table_data_unsafe(study, table_type) + except ChildNotFoundError: + # It's better to return an empty table than raising an 404 error + return {} df = pd.DataFrame.from_dict(data, orient="index") if columns: # Create a new dataframe with the listed columns. - # If a column does not exist in the DataFrame, it is created with empty values. df = pd.DataFrame(df, columns=columns) # type: ignore + + # If a column does not exist in the DataFrame, it is created with empty values. 
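A note on the pandas behaviour this comment relies on: building a new DataFrame with an explicit `columns` list both filters and reorders the columns, and any requested column that is absent from the source data is created filled with NaN. A minimal standalone sketch with made-up thermal values (not real study data):

import pandas as pd

source = {"de / 01_solar": {"group": "Other 2", "unitCount": 1}}
df = pd.DataFrame.from_dict(source, orient="index")

# "efficiency" is not in the source data, so it appears as a NaN column.
df = pd.DataFrame(df, columns=["group", "unitCount", "efficiency"])
print(df.to_dict(orient="index"))
# {'de / 01_solar': {'group': 'Other 2', 'unitCount': 1, 'efficiency': nan}}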
# noinspection PyTypeChecker - df = df.where(pd.notna(df), other=None) # + df = df.where(pd.notna(df), other=None) obj = df.to_dict(orient="index") From 233b4499e6224553e0f52956cb01d0156cc00547 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Sun, 7 Apr 2024 19:40:43 +0200 Subject: [PATCH 100/147] feat(tablemode): correct code style --- antarest/study/business/table_mode_management.py | 4 ++-- .../study_data_blueprint/test_renewable.py | 1 + .../study_data_blueprint/test_table_mode.py | 15 ++++++++------- 3 files changed, 11 insertions(+), 9 deletions(-) diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index c513d1a5c2..a7feefdd8d 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -63,7 +63,7 @@ def __init__( self._st_storage_manager = st_storage_manager self._binding_constraint_manager = binding_constraint_manager - def _get_table_data_unsafe(self, study, table_type): + def _get_table_data_unsafe(self, study: RawStudy, table_type: TableModeType) -> TableDataDTO: if table_type == TableModeType.AREA: areas_map = self._area_manager.get_all_area_props(study) data = {area_id: area.dict(by_alias=True) for area_id, area in areas_map.items()} @@ -112,7 +112,7 @@ def get_table_data( # It's better to return an empty table than raising an 404 error return {} - df = pd.DataFrame.from_dict(data, orient="index") + df = pd.DataFrame.from_dict(data, orient="index") # type: ignore if columns: # Create a new dataframe with the listed columns. df = pd.DataFrame(df, columns=columns) # type: ignore diff --git a/tests/integration/study_data_blueprint/test_renewable.py b/tests/integration/study_data_blueprint/test_renewable.py index 0e57e1464b..8a9d575d97 100644 --- a/tests/integration/study_data_blueprint/test_renewable.py +++ b/tests/integration/study_data_blueprint/test_renewable.py @@ -23,6 +23,7 @@ * delete a cluster (or several clusters) * validate the consistency of the matrices (and properties) """ + import json import re import typing as t diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index 8bebb875fa..334230dcb4 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -58,7 +58,7 @@ def test_lifecycle__nominal( # Get the schema of the areas table res = client.get( - f"/v1/table-schema/areas", + "/v1/table-schema/areas", headers=user_headers, ) assert res.status_code == 200, res.json() @@ -89,7 +89,7 @@ def test_lifecycle__nominal( "averageUnsuppliedEnergyCost": 3456, "dispatchableHydroPower": False, "filterSynthesis": "daily, monthly", # not changed - "filterYearByYear": "weekly, annual", + "filterYearByYear": "prout", }, "es": { "adequacyPatchMode": "inside", @@ -168,7 +168,7 @@ def test_lifecycle__nominal( # Get the schema of the links table res = client.get( - f"/v1/table-schema/links", + "/v1/table-schema/links", headers=user_headers, ) assert res.status_code == 200, res.json() @@ -279,7 +279,7 @@ def test_lifecycle__nominal( # Get the schema of the thermals table res = client.get( - f"/v1/table-schema/thermals", + "/v1/table-schema/thermals", headers=user_headers, ) assert res.status_code == 200, res.json() @@ -542,7 +542,7 @@ def test_lifecycle__nominal( # Get the schema of the renewables table res = client.get( - f"/v1/table-schema/renewables", + "/v1/table-schema/renewables", 
headers=user_headers, ) assert res.status_code == 200, res.json() @@ -569,6 +569,7 @@ def test_lifecycle__nominal( "it / Pouilles": {"group": "Wind Onshore"}, }, ) + assert res.status_code == 200, res.json() res = client.get( f"/v1/studies/{study_id}/table-mode/renewables", @@ -592,7 +593,7 @@ def test_lifecycle__nominal( # Get the schema of the short-term storages table res = client.get( - f"/v1/table-schema/st-storages", + "/v1/table-schema/st-storages", headers=user_headers, ) assert res.status_code == 200, res.json() @@ -816,7 +817,7 @@ def test_lifecycle__nominal( # Get the schema of the binding constraints table res = client.get( - f"/v1/table-schema/binding-constraints", + "/v1/table-schema/binding-constraints", headers=user_headers, ) assert res.status_code == 200, res.json() From 2bd92e043fe453a195d6e52bb0daa6d946faf00a Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Sun, 7 Apr 2024 20:17:39 +0200 Subject: [PATCH 101/147] docs(tablemode): add docstring and API docs for Table Mode --- .../study/business/table_mode_management.py | 26 ++++++- antarest/study/web/study_data_blueprint.py | 73 ++++++++++++++----- 2 files changed, 80 insertions(+), 19 deletions(-) diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index a7feefdd8d..d72c03a1b3 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -106,13 +106,25 @@ def get_table_data( table_type: TableModeType, columns: t.Sequence[_TableColumn], ) -> TableDataDTO: + """ + Get the table data of the specified type for the given study. + + Args: + study: The study to get the table data from. + table_type: The type of the table. + columns: The columns to include in the table. If empty, all columns are included. + + Returns: + The table data as a dictionary of dictionaries. + Where keys are the row names and values are dictionaries of column names and cell values. + """ try: data = self._get_table_data_unsafe(study, table_type) except ChildNotFoundError: # It's better to return an empty table than raising an 404 error return {} - df = pd.DataFrame.from_dict(data, orient="index") # type: ignore + df = pd.DataFrame.from_dict(data, orient="index") # type: ignore if columns: # Create a new dataframe with the listed columns. df = pd.DataFrame(df, columns=columns) # type: ignore @@ -168,6 +180,18 @@ def update_table_data( table_type: TableModeType, data: TableDataDTO, ) -> TableDataDTO: + """ + Update the properties of the objects in the study using the provided data. + + Args: + study: The study to update the objects in. + table_type: The type of the table. + data: The new properties of the objects as a dictionary of dictionaries. + Where keys are the row names and values are dictionaries of column names and cell values. + + Returns: + The updated properties of the objects including the old ones. 
+ """ if table_type == TableModeType.AREA: # Use AreaOutput to update properties of areas area_props_by_ids = {key: AreaOutput(**values) for key, values in data.items()} diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index 62ad33d970..c405c6ca28 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -840,6 +840,25 @@ def set_timeseries_form_values( study_service.ts_config_manager.set_field_values(study, field_values) + @bp.get( + path="/table-schema/{table_type}", + tags=[APITag.study_data], + summary="Get table schema", + ) + def get_table_schema( + table_type: TableModeType, + current_user: JWTUser = Depends(auth.get_current_user), + ) -> JSON: + """ + Get the properties of the table columns. + + Args: + - `table_type`: The type of table to get the schema for. + """ + logger.info("Getting table schema", extra={"user": current_user.id}) + model_schema = study_service.table_mode_manager.get_table_schema(table_type) + return model_schema + @bp.get( path="/studies/{uuid}/table-mode/{table_type}", tags=[APITag.study_data], @@ -848,11 +867,18 @@ def set_timeseries_form_values( def get_table_mode( uuid: str, table_type: TableModeType, - columns: str = "", + columns: str = Query("", description="A comma-separated list of columns to include in the table data"), current_user: JWTUser = Depends(auth.get_current_user), ) -> TableDataDTO: + """ + Get the table data for the given study and table type. + + Args: + - uuid: The UUID of the study. + - table_type: The type of table to get the data for. + """ logger.info( - f"Getting template table data for study {uuid}", + f"Getting table data for study {uuid}", extra={"user": current_user.id}, ) params = RequestParameters(user=current_user) @@ -861,30 +887,41 @@ def get_table_mode( table_data = study_service.table_mode_manager.get_table_data(study, table_type, column_list) return table_data - @bp.get( - path="/table-schema/{table_type}", - tags=[APITag.study_data], - summary="Get table schema", - ) - def get_table_schema( - table_type: TableModeType, - current_user: JWTUser = Depends(auth.get_current_user), - ) -> JSON: - logger.info("Getting table schema", extra={"user": current_user.id}) - model_schema = study_service.table_mode_manager.get_table_schema(table_type) - return model_schema - @bp.put( path="/studies/{uuid}/table-mode/{table_type}", tags=[APITag.study_data], - summary="Set table data with values from table form", + summary="Update table data with values from table form", ) - def set_table_mode( + def update_table_mode( uuid: str, table_type: TableModeType, - data: TableDataDTO, + data: TableDataDTO = Body( + ..., + example={ + "de / nuclear_cl1": { + "enabled": True, + "group": "Nuclear", + "unitCount": 17, + "nominalCapacity": 123, + }, + "de / gas_cl1": { + "enabled": True, + "group": "Gas", + "unitCount": 15, + "nominalCapacity": 456, + }, + }, + ), current_user: JWTUser = Depends(auth.get_current_user), ) -> TableDataDTO: + """ + Update the table data for the given study and table type. + + Args: + - uuid: The UUID of the study. + - table_type: The type of table to update. + - data: The table data to update. 
+ """ logger.info( f"Updating table data for study {uuid}", extra={"user": current_user.id}, From 8ad85b52fa982d51315fdb431171544ac0414f13 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Sun, 7 Apr 2024 20:18:01 +0200 Subject: [PATCH 102/147] test(tablemode): correct Table Mode unit test --- tests/integration/study_data_blueprint/test_table_mode.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index 334230dcb4..fa6034eae7 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -89,7 +89,7 @@ def test_lifecycle__nominal( "averageUnsuppliedEnergyCost": 3456, "dispatchableHydroPower": False, "filterSynthesis": "daily, monthly", # not changed - "filterYearByYear": "prout", + "filterYearByYear": "annual, weekly", }, "es": { "adequacyPatchMode": "inside", From de1c0d530ab37b159c7dddac8153c1210daee47f Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Mon, 8 Apr 2024 10:43:14 +0200 Subject: [PATCH 103/147] feat(tablemode): avoid sending columns with empty values --- antarest/study/business/table_mode_management.py | 6 +++--- tests/integration/study_data_blueprint/test_table_mode.py | 6 +----- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index d72c03a1b3..48d6aefe82 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -129,9 +129,9 @@ def get_table_data( # Create a new dataframe with the listed columns. df = pd.DataFrame(df, columns=columns) # type: ignore - # If a column does not exist in the DataFrame, it is created with empty values. - # noinspection PyTypeChecker - df = df.where(pd.notna(df), other=None) + # According to the study version, some properties may not be present, + # so we need to drop columns that are all NaN. 
+ df = df.dropna(axis=1, how="all") obj = df.to_dict(orient="index") diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index fa6034eae7..309ce2bdfd 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -731,7 +731,7 @@ def test_lifecycle__nominal( "injectionNominalCapacity", "withdrawalNominalCapacity", "reservoirCapacity", - "unknowColumn", + "unknowColumn", # should be ignored ] ), }, @@ -742,28 +742,24 @@ def test_lifecycle__nominal( "group": "Battery", "injectionNominalCapacity": 1550, "reservoirCapacity": 1500, - "unknowColumn": None, "withdrawalNominalCapacity": 1550, }, "fr / tesla": { "group": "Battery", "injectionNominalCapacity": 1200, "reservoirCapacity": 1200, - "unknowColumn": None, "withdrawalNominalCapacity": 1200, }, "it / storage3": { "group": "Pondage", "injectionNominalCapacity": 1234, "reservoirCapacity": 1357, - "unknowColumn": None, "withdrawalNominalCapacity": 1020, }, "it / storage4": { "group": "PSP_open", "injectionNominalCapacity": 567, "reservoirCapacity": 500, - "unknowColumn": None, "withdrawalNominalCapacity": 456, }, } From 1fad0f18832c1a00458e6f45a27f69738928ef7f Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Mon, 8 Apr 2024 16:22:47 +0200 Subject: [PATCH 104/147] feat(tablemode): improve validation of filtering fields --- .../storage/rawstudy/model/filesystem/config/area.py | 9 +++------ .../rawstudy/model/filesystem/config/field_validators.py | 2 -- .../storage/rawstudy/model/filesystem/config/links.py | 9 +++------ 3 files changed, 6 insertions(+), 14 deletions(-) diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/area.py b/antarest/study/storage/rawstudy/model/filesystem/config/area.py index 458f1185aa..bb6d972e65 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/area.py @@ -78,12 +78,9 @@ class FilteringSection(IniProperties): filter_synthesis: str = Field("", alias="filter-synthesis") filter_year_by_year: str = Field("", alias="filter-year-by-year") - _validate_filtering = validator( - "filter_synthesis", - "filter_year_by_year", - pre=True, - allow_reuse=True, - )(validate_filtering) + @validator("filter_synthesis", "filter_year_by_year", pre=True) + def _validate_filtering(cls, v: t.Any) -> str: + return validate_filtering(v) # noinspection SpellCheckingInspection class ModalOptimizationSection(IniProperties): diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/field_validators.py b/antarest/study/storage/rawstudy/model/filesystem/config/field_validators.py index f8044d786c..74f93f5c46 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/field_validators.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/field_validators.py @@ -1,8 +1,6 @@ -import re import typing as t _ALL_FILTERING = ["hourly", "daily", "weekly", "monthly", "annual"] -_find_all_filtering = re.compile("|".join(_ALL_FILTERING)).findall def extract_filtering(v: t.Any) -> t.Sequence[str]: diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/links.py b/antarest/study/storage/rawstudy/model/filesystem/config/links.py index 664941ed96..9e5dcf8747 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/links.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/links.py @@ -134,12 +134,9 @@ class LinkProperties(IniProperties): 
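The validator rewrite in PATCH 104 above is purely mechanical: assigning `validator(...)(some_function)` to a class attribute and decorating a classmethod with `@validator(...)` register the same hook. A small sketch using the pydantic v1 API the project relies on; the model and the cleaning rule below are invented for illustration, not the real `validate_filtering`:

from pydantic import BaseModel, validator

def normalize(value: str) -> str:
    return ", ".join(part.strip() for part in str(value).split(",") if part.strip())

class OldStyle(BaseModel):
    filter_synthesis: str = ""
    _clean = validator("filter_synthesis", pre=True, allow_reuse=True)(normalize)

class NewStyle(BaseModel):
    filter_synthesis: str = ""

    @validator("filter_synthesis", pre=True)
    def _clean(cls, v):
        return normalize(v)

assert OldStyle(filter_synthesis=" hourly ,daily ").filter_synthesis == "hourly, daily"
assert NewStyle(filter_synthesis=" hourly ,daily ").filter_synthesis == "hourly, daily"

Both classes validate the field the same way; the decorated form just makes the hook's signature explicit.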
description="color of the area in the map", ) - _validate_filtering = validator( - "filter_synthesis", - "filter_year_by_year", - pre=True, - allow_reuse=True, - )(validate_filtering) + @validator("filter_synthesis", "filter_year_by_year", pre=True) + def _validate_filtering(cls, v: t.Any) -> str: + return validate_filtering(v) _validate_color_rgb = validator("color_rgb", pre=True, allow_reuse=True)(validate_color_rgb) From 9f22cde7f47d4a1b9299c52b715c43fa1e61867a Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Mon, 8 Apr 2024 18:56:38 +0200 Subject: [PATCH 105/147] feat(tablemode): improve DataFrame to JSON conversion --- .../study/business/table_mode_management.py | 41 ++----------------- 1 file changed, 3 insertions(+), 38 deletions(-) diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index 48d6aefe82..fbe034cde9 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -1,6 +1,7 @@ import collections import typing as t +import numpy as np import pandas as pd from antarest.core.model import JSON @@ -133,46 +134,10 @@ def get_table_data( # so we need to drop columns that are all NaN. df = df.dropna(axis=1, how="all") - obj = df.to_dict(orient="index") - # Convert NaN to `None` because it is not JSON-serializable - for row in obj.values(): - for key, value in row.items(): - if pd.isna(value): - row[key] = None - - return t.cast(TableDataDTO, obj) - - file_study = self.storage_service.get_storage(study).get_raw(study) - columns_model = COLUMNS_MODELS_BY_TYPE[table_type] - glob_object = _get_glob_object(file_study, table_type) - schema_columns = columns_model.schema()["properties"] - - def get_column_value(col: str, data: t.Dict[str, t.Any]) -> t.Any: - schema = schema_columns[col] - relative_path = _get_relative_path(table_type, schema["path"]) - return _get_value(relative_path, data, schema["default"]) - - if table_type == TableTemplateType.AREA: - return { - area_id: columns_model.construct(**{col: get_column_value(col, data) for col in columns}) # type: ignore - for area_id, data in glob_object.items() - } - - if table_type == TableTemplateType.BINDING_CONSTRAINT: - return { - data["id"]: columns_model.construct(**{col: get_column_value(col, data) for col in columns}) # type: ignore - for data in glob_object.values() - } - - obj: t.Dict[str, t.Any] = {} - for id_1, value_1 in glob_object.items(): - for id_2, value_2 in value_1.items(): - obj[f"{id_1} / {id_2}"] = columns_model.construct( - **{col: get_column_value(col, value_2) for col in columns} - ) + df.replace(np.nan, None, inplace=True) - return obj + return t.cast(TableDataDTO, df.to_dict(orient="index")) def update_table_data( self, From e9e0a89e7af0c63c6d1282afb27da16e20b5acd7 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 9 Apr 2024 17:39:08 +0200 Subject: [PATCH 106/147] feat(tablemode): validate `filter_synthesis`, `filter_year_by_year` fields for BC --- .../model/command/create_binding_constraint.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py index 15e5e72f09..b5397e4513 100644 --- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py @@ -3,7 +3,7 @@ from abc import ABCMeta import numpy as np 
-from pydantic import BaseModel, Extra, Field, root_validator +from pydantic import BaseModel, Extra, Field, root_validator, validator from antarest.matrixstore.model import MatrixData from antarest.study.business.all_optional_meta import AllOptionalMetaclass @@ -11,6 +11,7 @@ BindingConstraintFrequency, BindingConstraintOperator, ) +from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import validate_filtering from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.business.matrix_constants_generator import GeneratorMatrixConstants @@ -129,6 +130,10 @@ def create_binding_constraint_config(study_version: t.Union[str, int], **kwargs: cls = get_binding_constraint_config_cls(study_version) return cls.from_dict(**kwargs) + @validator("filter_synthesis", "filter_year_by_year", pre=True) + def _validate_filtering(cls, v: t.Any) -> str: + return validate_filtering(v) + class OptionalProperties(BindingConstraintProperties870, metaclass=AllOptionalMetaclass, use_none=True): pass From 72aa56cb6de9604f17528567a8c310f76fdd0908 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 9 Apr 2024 17:48:24 +0200 Subject: [PATCH 107/147] feat(tablemode): remove `colorRgb` column from Table template --- antarest/study/business/area_management.py | 25 +------------------ .../api/studies/tableMode/constants.ts | 2 -- 2 files changed, 1 insertion(+), 26 deletions(-) diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py index 86cfeaddfd..94c0f6bb2f 100644 --- a/antarest/study/business/area_management.py +++ b/antarest/study/business/area_management.py @@ -130,17 +130,12 @@ class _BaseAreaDTO( Aggregates the fields of the `OptimizationProperties` and `AdequacyPathProperties` classes, but without the `UIProperties` fields. - Add the `color_rgb` field extracted from `/input/areas//ui.ini` information. - Add the fields extracted from the `/input/thermal/areas.ini` information: - `average_unsupplied_energy_cost` is extracted from `unserverd_energy_cost`, - `average_spilled_energy_cost` is extracted from `spilled_energy_cost`. """ - # Extra field which represents the Area color - color_rgb: str = Field("#E66C2C", description="color of the area in the map") - average_unsupplied_energy_cost: float = Field(0.0, description="average unserverd energy cost (€/MWh)") average_spilled_energy_cost: float = Field(0.0, description="average spilled energy cost (€/MWh)") @@ -171,7 +166,6 @@ def from_model( The `GetAreaDTO` object. 
""" obj = { - "color_rgb": area_folder.ui.style.color_rgb, "average_unsupplied_energy_cost": average_unsupplied_energy_cost, "average_spilled_energy_cost": average_spilled_energy_cost, **area_folder.optimization.filtering.dict(by_alias=False), @@ -196,17 +190,12 @@ def _to_adequacy_patch(self) -> AdequacyPathProperties: adequacy_path_section = AdequacyPathProperties.AdequacyPathSection(**obj) return AdequacyPathProperties(adequacy_patch=adequacy_path_section) - def _to_ui(self) -> UIProperties: - """Construct a partially initialized UI object with only the Area color.""" - style_section = AreaUI(color_rgb=self.color_rgb) - return UIProperties(style=style_section) - @property def area_folder(self) -> AreaFolder: area_folder = AreaFolder( optimization=self._to_optimization(), adequacy_patch=self._to_adequacy_patch(), - ui=self._to_ui(), + # UI properties are not configurable in Table Mode ) return area_folder @@ -331,18 +320,6 @@ def update_areas_props( command_context=command_context, ) ) - if old_area_folder.ui != new_area_folder.ui: - # update only the color fields, one by one - _color_fields = {"color_r", "color_g", "color_b"} - _data = new_area_folder.ui.style.to_config() - for _color_field in _color_fields: - commands.append( - UpdateConfig( - target=f"input/areas/{area_id}/ui/ui/{_color_field}", - data=_data[_color_field], - command_context=command_context, - ) - ) if old_area.average_unsupplied_energy_cost != new_area.average_unsupplied_energy_cost: commands.append( UpdateConfig( diff --git a/webapp/src/services/api/studies/tableMode/constants.ts b/webapp/src/services/api/studies/tableMode/constants.ts index 22c19310f1..bdbef1a7e9 100644 --- a/webapp/src/services/api/studies/tableMode/constants.ts +++ b/webapp/src/services/api/studies/tableMode/constants.ts @@ -15,8 +15,6 @@ export const TABLE_MODE_TYPES = [ export const TABLE_MODE_COLUMNS_BY_TYPE = { [AREAS]: [ - // UI - "colorRgb", // Optimization - Nodal optimization "nonDispatchablePower", "dispatchableHydroPower", From d642c139032728df8e65f71f166ef5a23e11a735 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Wed, 10 Apr 2024 16:06:26 +0200 Subject: [PATCH 108/147] test(tablemode): correct unit tests ("colorRgb" is removed) --- .../integration/study_data_blueprint/test_table_mode.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index 309ce2bdfd..dceefc718d 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -64,8 +64,6 @@ def test_lifecycle__nominal( assert res.status_code == 200, res.json() actual = res.json() assert set(actual["properties"]) == { - # UI - "colorRgb", # Optimization - Nodal optimization "nonDispatchablePower", "dispatchableHydroPower", @@ -95,9 +93,6 @@ def test_lifecycle__nominal( "adequacyPatchMode": "inside", "spreadSpilledEnergyCost": None, # not changed }, - "fr": { - "colorRgb": "#C00000", - }, }, ) assert res.status_code == 200, res.json() @@ -106,7 +101,6 @@ def test_lifecycle__nominal( "adequacyPatchMode": "outside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3456, - "colorRgb": "#0080FF", "dispatchableHydroPower": False, "filterSynthesis": "daily, monthly", "filterYearByYear": "weekly, annual", @@ -119,7 +113,6 @@ def test_lifecycle__nominal( "adequacyPatchMode": "inside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3000, - "colorRgb": "#0080FF", 
"dispatchableHydroPower": True, "filterSynthesis": "daily, monthly", "filterYearByYear": "hourly, weekly, annual", @@ -132,7 +125,6 @@ def test_lifecycle__nominal( "adequacyPatchMode": "outside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3000, - "colorRgb": "#C00000", "dispatchableHydroPower": True, "filterSynthesis": "", "filterYearByYear": "hourly", @@ -145,7 +137,6 @@ def test_lifecycle__nominal( "adequacyPatchMode": "outside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3000, - "colorRgb": "#0080FF", "dispatchableHydroPower": True, "filterSynthesis": "", "filterYearByYear": "hourly", From bc1285c5fc49fd303f701a6110dd26836ca5dd35 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Wed, 10 Apr 2024 16:18:14 +0200 Subject: [PATCH 109/147] feat(tablemode): change in `TableModeType` to handle aliases of old table types --- antarest/study/business/table_mode_management.py | 16 ++++++++++++++++ .../study_data_blueprint/test_table_mode.py | 11 +++++++++++ 2 files changed, 27 insertions(+) diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index fbe034cde9..cbbf5358cc 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -46,6 +46,22 @@ class TableModeType(EnumIgnoreCase): # Avoid "constraints" because we may have other kinds of constraints in the future BINDING_CONSTRAINT = "binding-constraints" + @classmethod + def _missing_(cls, value: object) -> t.Optional["EnumIgnoreCase"]: + if isinstance(value, str): + # handle aliases of old table types + value = value.upper() + aliases = { + "AREA": cls.AREA, + "LINK": cls.LINK, + "CLUSTER": cls.THERMAL, + "RENEWABLE": cls.RENEWABLE, + "BINDING CONSTRAINT": cls.BINDING_CONSTRAINT, + } + if value in aliases: + return aliases[value] + return super()._missing_(value) + class TableModeManager: def __init__( diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index dceefc718d..84cd61df5b 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -881,3 +881,14 @@ def test_lifecycle__nominal( } actual = res.json() assert actual == expected + + +def test_table_type_aliases(client: TestClient, user_access_token: str) -> None: + """ + Ensure that we can use the old table type aliases to get the schema of the tables. 
+ """ + user_headers = {"Authorization": f"Bearer {user_access_token}"} + # do not use `pytest.mark.parametrize`, because it is too slow + for table_type in ["area", "link", "cluster", "renewable", "binding constraint"]: + res = client.get(f"/v1/table-schema/{table_type}", headers=user_headers) + assert res.status_code == 200, f"Failed to get schema for {table_type}: {res.json()}" From b542ef867b0e893db70449cf73018626282efde4 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Fri, 19 Apr 2024 10:12:44 +0200 Subject: [PATCH 110/147] refactor(config): refactor validation of colors in Area and Link models --- .../study/storage/rawstudy/model/filesystem/config/area.py | 4 +++- .../study/storage/rawstudy/model/filesystem/config/links.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/area.py b/antarest/study/storage/rawstudy/model/filesystem/config/area.py index bb6d972e65..b16c002227 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/area.py @@ -166,7 +166,9 @@ class AreaUI(IniProperties): description="color of the area in the map", ) - _validate_color_rgb = validator("color_rgb", pre=True, allow_reuse=True)(validate_color_rgb) + @validator("color_rgb", pre=True) + def _validate_color_rgb(cls, v: t.Any) -> str: + return validate_color_rgb(v) @root_validator(pre=True) def _validate_colors(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]: diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/links.py b/antarest/study/storage/rawstudy/model/filesystem/config/links.py index 9e5dcf8747..7ebc0e2176 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/links.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/links.py @@ -138,7 +138,9 @@ class LinkProperties(IniProperties): def _validate_filtering(cls, v: t.Any) -> str: return validate_filtering(v) - _validate_color_rgb = validator("color_rgb", pre=True, allow_reuse=True)(validate_color_rgb) + @validator("color_rgb", pre=True) + def _validate_color_rgb(cls, v: t.Any) -> str: + return validate_color_rgb(v) @root_validator(pre=True) def _validate_colors(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]: From 7cb7166373e850ec3eb124e01ca46b5b05088b8b Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Fri, 19 Apr 2024 11:19:29 +0200 Subject: [PATCH 111/147] fix(area): correct endpoint to update Area UI --- antarest/study/business/area_management.py | 57 ++++++++++++++++++- antarest/study/service.py | 16 +++--- antarest/study/web/study_data_blueprint.py | 5 +- .../storage/business/test_arealink_manager.py | 7 +-- 4 files changed, 70 insertions(+), 15 deletions(-) diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py index 94c0f6bb2f..f13dfc5e28 100644 --- a/antarest/study/business/area_management.py +++ b/antarest/study/business/area_management.py @@ -70,6 +70,53 @@ class LayerInfoDTO(BaseModel): areas: t.List[str] +class UpdateAreaUi(BaseModel, extra="forbid", allow_population_by_field_name=True): + """ + DTO for updating area UI + + Usage: + + >>> from antarest.study.business.area_management import UpdateAreaUi + >>> from pprint import pprint + + >>> obj = { + ... "x": -673.75, + ... "y": 301.5, + ... "color_rgb": [230, 108, 44], + ... "layerX": {"0": -230, "4": -230, "6": -95, "7": -230, "8": -230}, + ... 
"layerY": {"0": 136, "4": 136, "6": 39, "7": 136, "8": 136}, + ... "layerColor": { + ... "0": "230, 108, 44", + ... "4": "230, 108, 44", + ... "6": "230, 108, 44", + ... "7": "230, 108, 44", + ... "8": "230, 108, 44", + ... }, + ... } + + >>> model = UpdateAreaUi(**obj) + >>> pprint(model.dict(by_alias=True), width=80) + {'colorRgb': [230, 108, 44], + 'layerColor': {0: '230, 108, 44', + 4: '230, 108, 44', + 6: '230, 108, 44', + 7: '230, 108, 44', + 8: '230, 108, 44'}, + 'layerX': {0: -230, 4: -230, 6: -95, 7: -230, 8: -230}, + 'layerY': {0: 136, 4: 136, 6: 39, 7: 136, 8: 136}, + 'x': -673, + 'y': 301} + + """ + + x: int = Field(title="X position") + y: int = Field(title="Y position") + color_rgb: t.Sequence[int] = Field(title="RGB color", alias="colorRgb") + layer_x: t.Mapping[int, int] = Field(default_factory=dict, title="X position of each layer", alias="layerX") + layer_y: t.Mapping[int, int] = Field(default_factory=dict, title="Y position of each layer", alias="layerY") + layer_color: t.Mapping[int, str] = Field(default_factory=dict, title="Color of each layer", alias="layerColor") + + def _get_ui_info_map(file_study: FileStudy, area_ids: t.Sequence[str]) -> t.Dict[str, t.Any]: """ Get the UI information (a JSON object) for each selected Area. @@ -601,8 +648,14 @@ def update_area_metadata( set=area_or_set.get_areas(list(file_study.config.areas)) if isinstance(area_or_set, DistrictSet) else [], ) - def update_area_ui(self, study: Study, area_id: str, area_ui: AreaUI, layer: str = "0") -> None: - obj = area_ui.to_config() + def update_area_ui(self, study: Study, area_id: str, area_ui: UpdateAreaUi, layer: str = "0") -> None: + obj = { + "x": area_ui.x, + "y": area_ui.y, + "color_r": area_ui.color_rgb[0], + "color_g": area_ui.color_rgb[1], + "color_b": area_ui.color_rgb[2], + } file_study = self.storage_service.get_storage(study).get_raw(study) commands = ( [ diff --git a/antarest/study/service.py b/antarest/study/service.py index 5bbdca35e2..66b3b2ddad 100644 --- a/antarest/study/service.py +++ b/antarest/study/service.py @@ -49,7 +49,7 @@ from antarest.study.business.adequacy_patch_management import AdequacyPatchManager from antarest.study.business.advanced_parameters_management import AdvancedParamsManager from antarest.study.business.allocation_management import AllocationManager -from antarest.study.business.area_management import AreaCreationDTO, AreaInfoDTO, AreaManager, AreaType +from antarest.study.business.area_management import AreaCreationDTO, AreaInfoDTO, AreaManager, AreaType, UpdateAreaUi from antarest.study.business.areas.hydro_management import HydroManager from antarest.study.business.areas.properties_management import PropertiesManager from antarest.study.business.areas.renewable_management import RenewableManager @@ -1292,11 +1292,13 @@ def export_task(notifier: TaskUpdateNotifier) -> TaskResult: ) return FileResponse( tmp_export_file, - headers={"Content-Disposition": "inline"} - if filetype == ExportFormat.JSON - else { - "Content-Disposition": f'attachment; filename="output-{output_id}.{"tar.gz" if filetype == ExportFormat.TAR_GZ else "zip"}' - }, + headers=( + {"Content-Disposition": "inline"} + if filetype == ExportFormat.JSON + else { + "Content-Disposition": f'attachment; filename="output-{output_id}.{"tar.gz" if filetype == ExportFormat.TAR_GZ else "zip"}' + } + ), media_type=filetype, ) else: @@ -1862,7 +1864,7 @@ def update_area_ui( self, uuid: str, area_id: str, - area_ui: AreaUI, + area_ui: UpdateAreaUi, layer: str, params: RequestParameters, ) -> None: 
diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index c405c6ca28..11d5945bd2 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -18,7 +18,8 @@ from antarest.study.business.adequacy_patch_management import AdequacyPatchFormFields from antarest.study.business.advanced_parameters_management import AdvancedParamsFormFields from antarest.study.business.allocation_management import AllocationFormFields, AllocationMatrix -from antarest.study.business.area_management import AreaCreationDTO, AreaInfoDTO, AreaType, LayerInfoDTO +from antarest.study.business.area_management import AreaCreationDTO, AreaInfoDTO, AreaType, LayerInfoDTO, \ + UpdateAreaUi from antarest.study.business.areas.hydro_management import InflowStructure, ManagementOptionsFormFields from antarest.study.business.areas.properties_management import PropertiesFormFields from antarest.study.business.areas.renewable_management import ( @@ -189,7 +190,7 @@ def create_link( def update_area_ui( uuid: str, area_id: str, - area_ui: AreaUI, + area_ui: UpdateAreaUi, layer: str = "0", current_user: JWTUser = Depends(auth.get_current_user), ) -> t.Any: diff --git a/tests/storage/business/test_arealink_manager.py b/tests/storage/business/test_arealink_manager.py index af01133b8c..a8beff5fc0 100644 --- a/tests/storage/business/test_arealink_manager.py +++ b/tests/storage/business/test_arealink_manager.py @@ -11,12 +11,11 @@ from antarest.core.utils.fastapi_sqlalchemy import db from antarest.matrixstore.repository import MatrixContentRepository from antarest.matrixstore.service import SimpleMatrixService -from antarest.study.business.area_management import AreaCreationDTO, AreaManager, AreaType +from antarest.study.business.area_management import AreaCreationDTO, AreaManager, AreaType, UpdateAreaUi from antarest.study.business.link_management import LinkInfoDTO, LinkManager from antarest.study.model import Patch, PatchArea, PatchCluster, RawStudy, StudyAdditionalData from antarest.study.repository import StudyMetadataRepository from antarest.study.storage.patch_service import PatchService -from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI from antarest.study.storage.rawstudy.model.filesystem.config.files import build from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, DistrictSet, FileStudyTreeConfig, Link from antarest.study.storage.rawstudy.model.filesystem.config.thermal import ThermalConfig @@ -112,7 +111,7 @@ def test_area_crud(empty_study: FileStudy, matrix_service: SimpleMatrixService): assert len(empty_study.config.areas.keys()) == 1 assert json.loads((empty_study.config.study_path / "patch.json").read_text())["areas"]["test"]["country"] is None - area_manager.update_area_ui(study, "test", AreaUI(x=100, y=200, color_rgb=(255, 0, 100))) + area_manager.update_area_ui(study, "test", UpdateAreaUi(x=100, y=200, color_rgb=(255, 0, 100))) assert empty_study.tree.get(["input", "areas", "test", "ui", "ui"]) == { "x": 100, "y": 200, @@ -158,7 +157,7 @@ def test_area_crud(empty_study: FileStudy, matrix_service: SimpleMatrixService): assert (empty_study.config.study_path / "patch.json").exists() assert json.loads((empty_study.config.study_path / "patch.json").read_text())["areas"]["test"]["country"] == "FR" - area_manager.update_area_ui(study, "test", AreaUI(x=100, y=200, color_rgb=(255, 0, 100))) + area_manager.update_area_ui(study, "test", UpdateAreaUi(x=100, y=200, color_rgb=(255, 0, 
100))) variant_study_service.append_commands.assert_called_with( variant_id, [ From e1673005cb5a5bda44e70bcc15a7c56c97a778ee Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Mon, 22 Apr 2024 15:27:48 +0200 Subject: [PATCH 112/147] refactor(api-ui): update studydata --- webapp/src/services/api/studydata.ts | 30 +++++++--------------------- 1 file changed, 7 insertions(+), 23 deletions(-) diff --git a/webapp/src/services/api/studydata.ts b/webapp/src/services/api/studydata.ts index 6558a3d15f..a39b55f20b 100644 --- a/webapp/src/services/api/studydata.ts +++ b/webapp/src/services/api/studydata.ts @@ -1,5 +1,4 @@ import { - AllClustersAndLinks, LinkCreationInfoDTO, LinkInfoWithUI, UpdateAreaUi, @@ -16,7 +15,7 @@ export const createArea = async ( uuid: string, name: string, ): Promise => { - const res = await client.post(`/v1/studies/${uuid}/areas?uuid=${uuid}`, { + const res = await client.post(`/v1/studies/${uuid}/areas`, { name, type: "AREA", }); @@ -27,10 +26,7 @@ export const createLink = async ( uuid: string, linkCreationInfo: LinkCreationInfoDTO, ): Promise => { - const res = await client.post( - `/v1/studies/${uuid}/links?uuid=${uuid}`, - linkCreationInfo, - ); + const res = await client.post(`/v1/studies/${uuid}/links`, linkCreationInfo); return res.data; }; @@ -41,7 +37,7 @@ export const updateAreaUI = async ( areaUi: UpdateAreaUi, ): Promise => { const res = await client.put( - `/v1/studies/${uuid}/areas/${areaId}/ui?uuid=${uuid}&area_id=${areaId}&layer=${layerId}`, + `/v1/studies/${uuid}/areas/${areaId}/ui?layer=${layerId}`, areaUi, ); return res.data; @@ -51,9 +47,7 @@ export const deleteArea = async ( uuid: string, areaId: string, ): Promise => { - const res = await client.delete( - `/v1/studies/${uuid}/areas/${areaId}?uuid=${uuid}&area_id=${areaId}`, - ); + const res = await client.delete(`/v1/studies/${uuid}/areas/${areaId}`); return res.data; }; @@ -63,7 +57,7 @@ export const deleteLink = async ( areaIdTo: string, ): Promise => { const res = await client.delete( - `/v1/studies/${uuid}/links/${areaIdFrom}/${areaIdTo}?uuid=${uuid}&area_from=${areaIdFrom}&area_to=${areaIdTo}`, + `/v1/studies/${uuid}/links/${areaIdFrom}/${areaIdTo}`, ); return res.data; }; @@ -156,13 +150,6 @@ export const createBindingConstraint = async ( return res.data; }; -export const getClustersAndLinks = async ( - uuid: string, -): Promise => { - const res = await client.get(`/v1/studies/${uuid}/linksandclusters`); - return res.data; -}; - interface GetAllLinksParams { uuid: string; withUi?: boolean; @@ -176,10 +163,7 @@ export const getAllLinks = async ( params: T, ): Promise>> => { const { uuid, withUi } = params; - const res = await client.get( - `/v1/studies/${uuid}/links${withUi ? `?with_ui=${withUi}` : ""}`, - ); + const withUiStr = withUi ? 
"with_ui=true" : ""; + const res = await client.get(`/v1/studies/${uuid}/links?${withUiStr}`); return res.data; }; - -export default {}; From 8c42941cc264712497a392c93261616468447427 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Mon, 22 Apr 2024 17:08:25 +0200 Subject: [PATCH 113/147] fix(storage): issue to run server --- .../storage/rawstudy/model/filesystem/config/area.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/area.py b/antarest/study/storage/rawstudy/model/filesystem/config/area.py index b16c002227..5ade25159f 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/area.py @@ -4,7 +4,6 @@ import typing as t -import typing_extensions as te from pydantic import Field, root_validator, validator from antarest.study.business.enum_ignore_case import EnumIgnoreCase @@ -468,9 +467,6 @@ class AreaFolder(IniProperties): ) -EnergyCost = te.Annotated[float, Field(ge=0, description="Energy cost (€/MWh)")] - - # noinspection SpellCheckingInspection class ThermalAreasProperties(IniProperties): """ @@ -526,20 +522,20 @@ class ThermalAreasProperties(IniProperties): 'unserverdenergycost': {'at': 6500.0, 'be': 3500.0, 'de': 1250.0, 'fr': 0.0}} """ - unserverd_energy_cost: t.MutableMapping[str, EnergyCost] = Field( + unserverd_energy_cost: t.MutableMapping[str, float] = Field( default_factory=dict, alias="unserverdenergycost", description="unserverd energy cost (€/MWh) of each area", ) - spilled_energy_cost: t.MutableMapping[str, EnergyCost] = Field( + spilled_energy_cost: t.MutableMapping[str, float] = Field( default_factory=dict, alias="spilledenergycost", description="spilled energy cost (€/MWh) of each area", ) @validator("unserverd_energy_cost", "spilled_energy_cost", pre=True) - def _validate_energy_cost(cls, v: t.Any) -> t.MutableMapping[str, EnergyCost]: + def _validate_energy_cost(cls, v: t.Any) -> t.MutableMapping[str, float]: if isinstance(v, dict): return {str(k): float(v) for k, v in v.items()} raise TypeError(f"Invalid type for energy cost: {type(v)}") From e2e44c6ad37e8bbf8249fc1a55db28d8e359bbdd Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Tue, 23 Apr 2024 12:28:33 +0200 Subject: [PATCH 114/147] feat(tablemode-ui): only display columns of study version --- webapp/src/components/common/TableMode.tsx | 46 +++++++++++++++---- .../api/studies/tableMode/constants.ts | 20 ++++---- 2 files changed, 47 insertions(+), 19 deletions(-) diff --git a/webapp/src/components/common/TableMode.tsx b/webapp/src/components/common/TableMode.tsx index fa34e1bf64..e5770d79d2 100644 --- a/webapp/src/components/common/TableMode.tsx +++ b/webapp/src/components/common/TableMode.tsx @@ -1,3 +1,4 @@ +import { useEffect, useState } from "react"; import { StudyMetadata } from "../../common/types"; import usePromise from "../../hooks/usePromise"; import { @@ -12,6 +13,8 @@ import { import { SubmitHandlerPlus } from "./Form/types"; import TableForm from "./TableForm"; import UsePromiseCond from "./utils/UsePromiseCond"; +import GridOffIcon from "@mui/icons-material/GridOff"; +import SimpleContent from "./page/SimpleContent"; export interface TableModeProps { studyId: StudyMetadata["id"]; @@ -21,10 +24,31 @@ export interface TableModeProps { function TableMode(props: TableModeProps) { const { studyId, type, columns } = props; + const [filteredColumns, 
setFilteredColumns] = useState(columns); const res = usePromise( () => getTableMode({ studyId, tableType: type, columns }), - [studyId, type, JSON.stringify(columns)], + [studyId, type, columns.join(",")], + ); + + // Filter columns based on the data received, because the API may return + // fewer columns than requested depending on the study version + useEffect( + () => { + const dataKeys = Object.keys(res.data || {}); + + if (dataKeys.length === 0) { + setFilteredColumns([]); + return; + } + + const data = res.data!; + const dataRowKeys = Object.keys(data[dataKeys[0]]); + + setFilteredColumns(columns.filter((col) => dataRowKeys.includes(col))); + }, + // eslint-disable-next-line react-hooks/exhaustive-deps + [res.data, columns.join(",")], ); //////////////////////////////////////////////////////////////// @@ -42,14 +66,18 @@ function TableMode(props: TableModeProps) { return ( ( - - )} + ifResolved={(data) => + filteredColumns.length > 0 ? ( + + ) : ( + } title="study.results.noData" /> + ) + } /> ); } diff --git a/webapp/src/services/api/studies/tableMode/constants.ts b/webapp/src/services/api/studies/tableMode/constants.ts index bdbef1a7e9..e75e163cc3 100644 --- a/webapp/src/services/api/studies/tableMode/constants.ts +++ b/webapp/src/services/api/studies/tableMode/constants.ts @@ -15,7 +15,6 @@ export const TABLE_MODE_TYPES = [ export const TABLE_MODE_COLUMNS_BY_TYPE = { [AREAS]: [ - // Optimization - Nodal optimization "nonDispatchablePower", "dispatchableHydroPower", "otherDispatchablePower", @@ -23,10 +22,9 @@ export const TABLE_MODE_COLUMNS_BY_TYPE = { "spreadUnsuppliedEnergyCost", "averageSpilledEnergyCost", "spreadSpilledEnergyCost", - // Optimization - Filtering "filterSynthesis", "filterYearByYear", - // Adequacy patch + // Since v8.3 "adequacyPatchMode", ], [LINKS]: [ @@ -37,14 +35,12 @@ export const TABLE_MODE_COLUMNS_BY_TYPE = { "assetType", "linkStyle", "linkWidth", - "comments", // unknown field?! 
+ "comments", "displayComments", - // Optimization - Filtering "filterSynthesis", "filterYearByYear", ], [THERMALS]: [ - // "name" is read-only "group", "enabled", "unitCount", @@ -64,8 +60,8 @@ export const TABLE_MODE_COLUMNS_BY_TYPE = { "fixedCost", "startupCost", "marketBidCost", - // Pollutants - since v8.6 (except for "co2") "co2", + // Since v8.6 "nh3", "so2", "nox", @@ -84,6 +80,7 @@ export const TABLE_MODE_COLUMNS_BY_TYPE = { "variableOMCost", ], [RENEWABLES]: [ + // Since v8.1 "group", "enabled", "tsInterpretation", @@ -91,23 +88,26 @@ export const TABLE_MODE_COLUMNS_BY_TYPE = { "nominalCapacity", ], [ST_STORAGES]: [ + // Since v8.6 "group", - // "enabled", // since v8.8 "injectionNominalCapacity", "withdrawalNominalCapacity", "reservoirCapacity", "efficiency", "initialLevel", "initialLevelOptim", + // Since v8.8 + "enabled", ], [BINDING_CONSTRAINTS]: [ - "group", "enabled", "timeStep", "operator", "comments", - // Optimization - Filtering + // Since v8.3 "filterSynthesis", "filterYearByYear", + // Since v8.7 + "group", ], } as const; From 463632b327bf25d79cd2d75f208cdd25a697cdb0 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 23 Apr 2024 11:40:05 +0200 Subject: [PATCH 115/147] fix: resolution of conflicts --- antarest/study/business/area_management.py | 4 +-- .../business/areas/st_storage_management.py | 4 +-- .../business/binding_constraint_management.py | 8 +++--- antarest/study/business/link_management.py | 3 ++- .../rawstudy/model/filesystem/config/model.py | 4 +-- .../model/filesystem/config/st_storage.py | 2 +- .../business/utils_binding_constraint.py | 6 ++--- .../command/create_binding_constraint.py | 27 +++---------------- antarest/study/web/study_data_blueprint.py | 3 +-- 9 files changed, 19 insertions(+), 42 deletions(-) diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py index f13dfc5e28..db04120884 100644 --- a/antarest/study/business/area_management.py +++ b/antarest/study/business/area_management.py @@ -7,14 +7,14 @@ from antarest.core.exceptions import ConfigFileNotFound, DuplicateAreaName, LayerNotAllowedToBeDeleted, LayerNotFound from antarest.core.model import JSON -from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands +from antarest.study.business.all_optional_meta import AllOptionalMetaclass, camel_case_model +from antarest.study.business.utils import execute_or_add_commands from antarest.study.model import Patch, PatchArea, PatchCluster, RawStudy, Study from antarest.study.repository import StudyMetadataRepository from antarest.study.storage.patch_service import PatchService from antarest.study.storage.rawstudy.model.filesystem.config.area import ( AdequacyPathProperties, AreaFolder, - AreaUI, OptimizationProperties, ThermalAreasProperties, UIProperties, diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py index 05ade38c1c..bb0987502c 100644 --- a/antarest/study/business/areas/st_storage_management.py +++ b/antarest/study/business/areas/st_storage_management.py @@ -236,7 +236,7 @@ def create_storage_output( cluster_id: str, config: t.Mapping[str, t.Any], ) -> "STStorageOutput": - obj = create_st_storage_config(study_version=study_version, **config, id=cluster_id) + obj = create_st_storage_config(study_version, **config, id=cluster_id) kwargs = obj.dict(by_alias=False) return STStorageOutput(**kwargs) @@ -361,7 +361,7 @@ def get_all_storages_props( storages_by_areas = 
collections.defaultdict(dict) for area_id, cluster_obj in storages.items(): for cluster_id, cluster in cluster_obj.items(): - storages_by_areas[area_id][cluster_id] = STStorageOutput.from_config(cluster_id, cluster) + storages_by_areas[area_id][cluster_id] = create_storage_output(int(study.version), cluster_id, cluster) return storages_by_areas diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py index af37a9e1c5..9c29c5925d 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -10,11 +10,9 @@ from antarest.core.exceptions import ( BindingConstraintNotFound, - ConfigFileNotFound, ConstraintAlreadyExistError, ConstraintIdNotFoundError, DuplicateConstraintName, - IncoherenceBetweenMatricesLength, InvalidConstraintName, InvalidFieldForVersionError, MatrixWidthMismatchError, @@ -323,7 +321,7 @@ class ConstraintOutput870(ConstraintOutput830): def _get_references_by_widths( file_study: FileStudy, bcs: t.Sequence[ConstraintOutput] -) -> Mapping[int, Sequence[Tuple[str, str]]]: +) -> t.Mapping[int, t.Sequence[t.Tuple[str, str]]]: """ Iterates over each BC and its associated matrices. For each matrix, it checks its width according to the expected matrix shapes. @@ -363,7 +361,7 @@ def _get_references_by_widths( return references_by_width -def _validate_binding_constraints(file_study: FileStudy, bcs: Sequence[ConstraintOutput]) -> bool: +def _validate_binding_constraints(file_study: FileStudy, bcs: t.Sequence[ConstraintOutput]) -> bool: """ Validates the binding constraints within a group. """ @@ -371,7 +369,7 @@ def _validate_binding_constraints(file_study: FileStudy, bcs: Sequence[Constrain if len(references_by_widths) > 1: most_common = collections.Counter(references_by_widths.keys()).most_common() - invalid_constraints: Dict[str, str] = {} + invalid_constraints: t.Dict[str, str] = {} for width, _ in most_common[1:]: references = references_by_widths[width] diff --git a/antarest/study/business/link_management.py b/antarest/study/business/link_management.py index 746a998ba6..375a539fd8 100644 --- a/antarest/study/business/link_management.py +++ b/antarest/study/business/link_management.py @@ -4,7 +4,8 @@ from antarest.core.exceptions import ConfigFileNotFound from antarest.core.model import JSON -from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands +from antarest.study.business.all_optional_meta import AllOptionalMetaclass, camel_case_model +from antarest.study.business.utils import execute_or_add_commands from antarest.study.model import RawStudy from antarest.study.storage.rawstudy.model.filesystem.config.links import LinkProperties from antarest.study.storage.storage_service import StudyStorageService diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/model.py b/antarest/study/storage/rawstudy/model/filesystem/config/model.py index 2ec15e6915..ff79c51073 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/model.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/model.py @@ -2,11 +2,11 @@ import typing as t from pathlib import Path -from pydantic import BaseModel -from pydantic import Field, root_validator +from pydantic import BaseModel, Field, root_validator from antarest.core.utils.utils import DTO from antarest.study.business.enum_ignore_case import EnumIgnoreCase + from .binding_constraint import BindingConstraintFrequency from 
.field_validators import extract_filtering from .renewable import RenewableConfigType diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py b/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py index 9d8dd72229..3355ba571a 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py @@ -149,7 +149,7 @@ class STStorage880Config(STStorage880Properties, LowerCaseIdentifier): STStorageConfigType = t.Union[STStorageConfig, STStorage880Config] -def get_st_storage_config_cls(study_version: t.Union[str, int]) -> t.Type[STStorageConfig]: +def get_st_storage_config_cls(study_version: t.Union[str, int]) -> t.Type[STStorageConfigType]: """ Retrieves the short-term storage configuration class based on the study version. diff --git a/antarest/study/storage/variantstudy/business/utils_binding_constraint.py b/antarest/study/storage/variantstudy/business/utils_binding_constraint.py index 552afed5e0..ebbeeec739 100644 --- a/antarest/study/storage/variantstudy/business/utils_binding_constraint.py +++ b/antarest/study/storage/variantstudy/business/utils_binding_constraint.py @@ -1,9 +1,7 @@ import typing as t -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( - BindingConstraintFrequency, -) -from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig, BindingConstraintDTO +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.model import BindingConstraintDTO, FileStudyTreeConfig def parse_bindings_coeffs_and_save_into_config( diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py index b5397e4513..96e9643165 100644 --- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py @@ -91,6 +91,10 @@ class BindingConstraintProperties830(BindingConstraintPropertiesBase): filter_year_by_year: str = Field("", alias="filter-year-by-year") filter_synthesis: str = Field("", alias="filter-synthesis") + @validator("filter_synthesis", "filter_year_by_year", pre=True) + def _validate_filtering(cls, v: t.Any) -> str: + return validate_filtering(v) + class BindingConstraintProperties870(BindingConstraintProperties830): group: str = DEFAULT_GROUP @@ -130,10 +134,6 @@ def create_binding_constraint_config(study_version: t.Union[str, int], **kwargs: cls = get_binding_constraint_config_cls(study_version) return cls.from_dict(**kwargs) - @validator("filter_synthesis", "filter_year_by_year", pre=True) - def _validate_filtering(cls, v: t.Any) -> str: - return validate_filtering(v) - class OptionalProperties(BindingConstraintProperties870, metaclass=AllOptionalMetaclass, use_none=True): pass @@ -144,25 +144,6 @@ class OptionalProperties(BindingConstraintProperties870, metaclass=AllOptionalMe # ================================================================================= -BindingConstraintPropertiesType = t.Union[BindingConstraintProperties870, BindingConstraintProperties] - - -def get_binding_constraint_config_cls(study_version: t.Union[str, int]) -> t.Type[BindingConstraintPropertiesType]: - """ - Retrieves the short-term storage configuration class based on the study 
version. - - Args: - study_version: The version of the study. - - Returns: - The short-term storage configuration class. - """ - version = int(study_version) - if version >= 870: - return BindingConstraintProperties870 - return BindingConstraintProperties - - class BindingConstraintMatrices(BaseModel, extra=Extra.forbid, allow_population_by_field_name=True): """ Class used to store the matrices of a binding constraint. diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index 11d5945bd2..ecdd5ff191 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -18,8 +18,7 @@ from antarest.study.business.adequacy_patch_management import AdequacyPatchFormFields from antarest.study.business.advanced_parameters_management import AdvancedParamsFormFields from antarest.study.business.allocation_management import AllocationFormFields, AllocationMatrix -from antarest.study.business.area_management import AreaCreationDTO, AreaInfoDTO, AreaType, LayerInfoDTO, \ - UpdateAreaUi +from antarest.study.business.area_management import AreaCreationDTO, AreaInfoDTO, AreaType, LayerInfoDTO, UpdateAreaUi from antarest.study.business.areas.hydro_management import InflowStructure, ManagementOptionsFormFields from antarest.study.business.areas.properties_management import PropertiesFormFields from antarest.study.business.areas.renewable_management import ( From 9c8f37f46ef272f0a29b8568454b50a007134d20 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 23 Apr 2024 12:53:55 +0200 Subject: [PATCH 116/147] feat(config-ui): add "Geographic Trimming (Binding Constraints)" tab Co-authored-by: Samir Kamal <1954121+skamril@users.noreply.github.com> --- webapp/public/locales/en/main.json | 5 +++-- webapp/public/locales/fr/main.json | 1 + .../Singlestudy/explore/Configuration/index.tsx | 14 ++++++++++++++ 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index f3d8e241a9..788dec17c1 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -389,8 +389,9 @@ "study.configuration.advancedParameters.simulationCores": "Simulation cores", "study.configuration.advancedParameters.renewableGenerationModeling": "Renewable generation modeling", "study.configuration.economicOpt": "Economic Opt.", - "study.configuration.geographicTrimmingAreas": "Geographic Trimming (areas)", - "study.configuration.geographicTrimmingLinks": "Geographic Trimming (links)", + "study.configuration.geographicTrimmingAreas": "Geographic Trimming (Areas)", + "study.configuration.geographicTrimmingLinks": "Geographic Trimming (Links)", + "study.configuration.geographicTrimmingBindingConstraints": "Geographic Trimming (Binding Constraints)", "study.modelization.properties": "Properties", "study.modelization.properties.energyCost": "Energy cost (€/Wh)", "study.modelization.properties.unsupplied": "Unsupplied", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index bec0e911cc..8fc1b6c90c 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -391,6 +391,7 @@ "study.configuration.economicOpt": "Options économiques", "study.configuration.geographicTrimmingAreas": "Filtre géographique (zones)", "study.configuration.geographicTrimmingLinks": "Filtre géographique (liens)", + "study.configuration.geographicTrimmingBindingConstraints": "Filtre géographique (contraintes couplantes)", 
"study.modelization.properties": "Propriétés", "study.modelization.properties.energyCost": "Coût de l'énergie", "study.modelization.properties.unsupplied": "Non distribuée", diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx b/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx index cfc8f21993..fd649e9e7d 100644 --- a/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx @@ -31,6 +31,10 @@ function Configuration() { { id: 5, name: t("study.configuration.economicOpt") }, { id: 6, name: t("study.configuration.geographicTrimmingAreas") }, { id: 7, name: t("study.configuration.geographicTrimmingLinks") }, + { + id: 8, + name: t("study.configuration.geographicTrimmingBindingConstraints"), + }, ].filter(Boolean), [study.version, t], ); @@ -96,6 +100,16 @@ function Configuration() { /> ), ], + [ + R.equals(8), + () => ( + + ), + ], ])(tabList[currentTabIndex].id)} } From f3863c2a7e495a171cbd3510d2148624c9261f60 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Mon, 22 Apr 2024 20:58:06 +0200 Subject: [PATCH 117/147] fix(st-storage): turn `STStorageOutput` fields into optional fields --- antarest/study/business/areas/st_storage_management.py | 7 ++++--- tests/integration/study_data_blueprint/test_table_mode.py | 5 +++++ 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py index bb0987502c..373f8c3ea4 100644 --- a/antarest/study/business/areas/st_storage_management.py +++ b/antarest/study/business/areas/st_storage_management.py @@ -80,7 +80,7 @@ def to_config(self, study_version: t.Union[str, int]) -> STStorageConfigType: @camel_case_model -class STStorageOutput(STStorage880Config): +class STStorageOutput(STStorage880Config, metaclass=AllOptionalMetaclass, use_none=True): """ Model representing the form used to display the details of a short-term storage entry. 
""" @@ -236,7 +236,7 @@ def create_storage_output( cluster_id: str, config: t.Mapping[str, t.Any], ) -> "STStorageOutput": - obj = create_st_storage_config(study_version, **config, id=cluster_id) + obj = create_st_storage_config(study_version=study_version, **config, id=cluster_id) kwargs = obj.dict(by_alias=False) return STStorageOutput(**kwargs) @@ -357,11 +357,12 @@ def get_all_storages_props( except KeyError: raise STStorageConfigNotFound(path) from None + study_version = study.version storages_by_areas: t.MutableMapping[str, t.MutableMapping[str, STStorageOutput]] storages_by_areas = collections.defaultdict(dict) for area_id, cluster_obj in storages.items(): for cluster_id, cluster in cluster_obj.items(): - storages_by_areas[area_id][cluster_id] = create_storage_output(int(study.version), cluster_id, cluster) + storages_by_areas[area_id][cluster_id] = create_storage_output(study_version, cluster_id, cluster) return storages_by_areas diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index 84cd61df5b..15536b8426 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -594,6 +594,7 @@ def test_lifecycle__nominal( "id", "name", # Short-term storage fields + "enabled", # since v8.8 "group", "injectionNominalCapacity", "withdrawalNominalCapacity", @@ -670,6 +671,7 @@ def test_lifecycle__nominal( # "id": "siemens", # "name": "Siemens", "efficiency": 1, + "enabled": None, "group": "Battery", "initialLevel": 0.5, "initialLevelOptim": False, @@ -681,6 +683,7 @@ def test_lifecycle__nominal( # "id": "tesla", # "name": "Tesla", "efficiency": 0.75, + "enabled": None, "group": "Battery", "initialLevel": 0.89, "initialLevelOptim": False, @@ -692,6 +695,7 @@ def test_lifecycle__nominal( # "id": "storage3", # "name": "storage3", "efficiency": 1, + "enabled": None, "group": "Pondage", "initialLevel": 1, "initialLevelOptim": False, @@ -703,6 +707,7 @@ def test_lifecycle__nominal( # "id": "storage4", # "name": "storage4", "efficiency": 1, + "enabled": None, "group": "PSP_open", "initialLevel": 0.5, "initialLevelOptim": True, From 38cb6532a40d049ef5230b2ec28b120e6a38d20a Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 23 Apr 2024 15:02:04 +0200 Subject: [PATCH 118/147] test(st-storage): correct integration tests for ST Storages --- .../study_data_blueprint/test_st_storage.py | 102 +++++++++++------- 1 file changed, 65 insertions(+), 37 deletions(-) diff --git a/tests/integration/study_data_blueprint/test_st_storage.py b/tests/integration/study_data_blueprint/test_st_storage.py index 33b506fc77..b8aa0de878 100644 --- a/tests/integration/study_data_blueprint/test_st_storage.py +++ b/tests/integration/study_data_blueprint/test_st_storage.py @@ -8,14 +8,22 @@ from starlette.testclient import TestClient from antarest.core.tasks.model import TaskStatus -from antarest.study.business.areas.st_storage_management import STStorageOutput +from antarest.study.business.areas.st_storage_management import create_storage_output from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id -from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import STStorageConfig +from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import create_st_storage_config from tests.integration.utils import wait_task_completion -DEFAULT_CONFIG = json.loads(STStorageConfig(id="dummy", 
name="dummy").json(by_alias=True, exclude={"id", "name"})) +_ST_STORAGE_860_CONFIG = create_st_storage_config(860, name="dummy") +_ST_STORAGE_880_CONFIG = create_st_storage_config(880, name="dummy") -DEFAULT_PROPERTIES = json.loads(STStorageOutput(name="dummy").json(by_alias=True, exclude={"id", "name"})) +_ST_STORAGE_OUTPUT_860 = create_storage_output(860, cluster_id="dummy", config={"name": "dummy"}) +_ST_STORAGE_OUTPUT_880 = create_storage_output(880, cluster_id="dummy", config={"name": "dummy"}) + +DEFAULT_CONFIG_860 = json.loads(_ST_STORAGE_860_CONFIG.json(by_alias=True, exclude={"id", "name"})) +DEFAULT_CONFIG_880 = json.loads(_ST_STORAGE_880_CONFIG.json(by_alias=True, exclude={"id", "name"})) + +DEFAULT_OUTPUT_860 = json.loads(_ST_STORAGE_OUTPUT_860.json(by_alias=True, exclude={"id", "name"})) +DEFAULT_OUTPUT_880 = json.loads(_ST_STORAGE_OUTPUT_880.json(by_alias=True, exclude={"id", "name"})) # noinspection SpellCheckingInspection @@ -30,7 +38,13 @@ class TestSTStorage: """ @pytest.mark.parametrize("study_type", ["raw", "variant"]) - @pytest.mark.parametrize("study_version", [860, 880]) + @pytest.mark.parametrize( + "study_version, default_output", + [ + pytest.param(860, DEFAULT_OUTPUT_860, id="860"), + pytest.param(880, DEFAULT_OUTPUT_880, id="880"), + ], + ) def test_lifecycle__nominal( self, client: TestClient, @@ -38,6 +52,7 @@ def test_lifecycle__nominal( study_id: str, study_type: str, study_version: int, + default_output: t.Dict[str, t.Any], ) -> None: """ The purpose of this integration test is to test the endpoints @@ -68,7 +83,7 @@ def test_lifecycle__nominal( # ============================= user_headers = {"Authorization": f"Bearer {user_access_token}"} - # Upgrade study to version 860 + # Upgrade study to version 860 or above res = client.put( f"/v1/studies/{study_id}/upgrade", headers=user_headers, @@ -118,17 +133,15 @@ def test_lifecycle__nominal( assert res.status_code == 422, res.json() assert res.json()["exception"] in {"ValidationError", "RequestValidationError"}, res.json() - # We can create a short-term storage with the following properties: + # We can create a short-term storage with the following properties. + # Unfilled properties will be set to their default values. 
siemens_properties = { - **DEFAULT_PROPERTIES, "name": siemens_battery, "group": "Battery", "injectionNominalCapacity": 1450, "withdrawalNominalCapacity": 1350, "reservoirCapacity": 1500, } - if study_version < 880: - del siemens_properties["enabled"] # only exists since v8.8 res = client.post( f"/v1/studies/{study_id}/areas/{area_id}/storages", headers=user_headers, @@ -137,8 +150,8 @@ def test_lifecycle__nominal( assert res.status_code == 200, res.json() siemens_battery_id = res.json()["id"] assert siemens_battery_id == transform_name_to_id(siemens_battery) - siemens_config = {**siemens_properties, "id": siemens_battery_id, "enabled": True} - assert res.json() == siemens_config + siemens_output = {**default_output, **siemens_properties, "id": siemens_battery_id} + assert res.json() == siemens_output # reading the properties of a short-term storage res = client.get( @@ -146,7 +159,7 @@ def test_lifecycle__nominal( headers=user_headers, ) assert res.status_code == 200, res.json() - assert res.json() == siemens_config + assert res.json() == siemens_output # ============================= # SHORT-TERM STORAGE MATRICES @@ -195,7 +208,7 @@ def test_lifecycle__nominal( headers=user_headers, ) assert res.status_code == 200, res.json() - assert res.json() == [siemens_config] + assert res.json() == [siemens_output] # updating properties res = client.patch( @@ -207,19 +220,19 @@ def test_lifecycle__nominal( }, ) assert res.status_code == 200, res.json() - siemens_config = { - **siemens_config, + siemens_output = { + **siemens_output, "name": "New Siemens Battery", "reservoirCapacity": 2500, } - assert res.json() == siemens_config + assert res.json() == siemens_output res = client.get( f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", headers=user_headers, ) assert res.status_code == 200, res.json() - assert res.json() == siemens_config + assert res.json() == siemens_output # =========================== # SHORT-TERM STORAGE UPDATE @@ -234,13 +247,13 @@ def test_lifecycle__nominal( "reservoirCapacity": 0, }, ) - siemens_config = { - **siemens_config, + siemens_output = { + **siemens_output, "initialLevel": 0.59, "reservoirCapacity": 0, } assert res.status_code == 200, res.json() - assert res.json() == siemens_config + assert res.json() == siemens_output # An attempt to update the `efficiency` property with an invalid value # should raise a validation error. 
@@ -260,7 +273,7 @@ def test_lifecycle__nominal( headers=user_headers, ) assert res.status_code == 200, res.json() - assert res.json() == siemens_config + assert res.json() == siemens_output # ============================= # SHORT-TERM STORAGE DUPLICATION @@ -274,11 +287,11 @@ def test_lifecycle__nominal( ) assert res.status_code in {200, 201}, res.json() # asserts the config is the same - duplicated_config = dict(siemens_config) - duplicated_config["name"] = new_name # type: ignore + duplicated_output = dict(siemens_output) + duplicated_output["name"] = new_name duplicated_id = transform_name_to_id(new_name) - duplicated_config["id"] = duplicated_id # type: ignore - assert res.json() == duplicated_config + duplicated_output["id"] = duplicated_id + assert res.json() == duplicated_output # asserts the matrix has also been duplicated res = client.get( @@ -358,16 +371,16 @@ def test_lifecycle__nominal( headers=user_headers, ) assert res.status_code == 200, res.json() - siemens_config = {**DEFAULT_PROPERTIES, **siemens_properties, "id": siemens_battery_id} - grand_maison_config = {**DEFAULT_PROPERTIES, **grand_maison_properties, "id": grand_maison_id} - assert res.json() == [duplicated_config, siemens_config, grand_maison_config] + siemens_output = {**default_output, **siemens_properties, "id": siemens_battery_id} + grand_maison_output = {**default_output, **grand_maison_properties, "id": grand_maison_id} + assert res.json() == [duplicated_output, siemens_output, grand_maison_output] # We can delete the three short-term storages at once. res = client.request( "DELETE", f"/v1/studies/{study_id}/areas/{area_id}/storages", headers=user_headers, - json=[grand_maison_id, duplicated_config["id"]], + json=[grand_maison_id, duplicated_output["id"]], ) assert res.status_code == 204, res.json() assert res.text in {"", "null"} # Old FastAPI versions return 'null'. @@ -539,7 +552,22 @@ def test_lifecycle__nominal( assert res.json()["enabled"] is False @pytest.mark.parametrize("study_type", ["raw", "variant"]) - def test__default_values(self, client: TestClient, user_access_token: str, study_type: str) -> None: + @pytest.mark.parametrize( + "study_version, default_config, default_output", + [ + pytest.param(860, DEFAULT_CONFIG_860, DEFAULT_OUTPUT_860, id="860"), + pytest.param(880, DEFAULT_CONFIG_880, DEFAULT_OUTPUT_880, id="880"), + ], + ) + def test__default_values( + self, + client: TestClient, + user_access_token: str, + study_type: str, + study_version: int, + default_config: t.Dict[str, t.Any], + default_output: t.Dict[str, t.Any], + ) -> None: """ The purpose of this integration test is to test the default values of the properties of a short-term storage. 
@@ -553,7 +581,7 @@ def test__default_values(self, client: TestClient, user_access_token: str, study res = client.post( "/v1/studies", headers=user_headers, - params={"name": "MyStudy", "version": 860}, + params={"name": "MyStudy", "version": study_version}, ) assert res.status_code in {200, 201}, res.json() study_id = res.json() @@ -586,8 +614,8 @@ def test__default_values(self, client: TestClient, user_access_token: str, study ) assert res.status_code == 200, res.json() tesla_battery_id = res.json()["id"] - tesla_config = {**DEFAULT_PROPERTIES, "id": tesla_battery_id, "name": tesla_battery, "group": "Battery"} - assert res.json() == tesla_config + tesla_output = {**default_output, "id": tesla_battery_id, "name": tesla_battery, "group": "Battery"} + assert res.json() == tesla_output # Use the Debug mode to make sure that the initialLevel and initialLevelOptim properties # are properly set in the configuration file. @@ -598,7 +626,7 @@ def test__default_values(self, client: TestClient, user_access_token: str, study ) assert res.status_code == 200, res.json() actual = res.json() - expected = {**DEFAULT_CONFIG, "name": tesla_battery, "group": "Battery"} + expected = {**default_config, "name": tesla_battery, "group": "Battery"} assert actual == expected # We want to make sure that the default properties are applied to a study variant. @@ -637,7 +665,7 @@ def test__default_values(self, client: TestClient, user_access_token: str, study "action": "create_st_storage", "args": { "area_id": "fr", - "parameters": {**DEFAULT_CONFIG, "name": siemens_battery, "group": "Battery"}, + "parameters": {**default_config, "name": siemens_battery, "group": "Battery"}, "pmax_injection": ANY, "pmax_withdrawal": ANY, "lower_rule_curve": ANY, @@ -721,7 +749,7 @@ def test__default_values(self, client: TestClient, user_access_token: str, study assert res.status_code == 200, res.json() actual = res.json() expected = { - **DEFAULT_CONFIG, + **default_config, "name": siemens_battery, "group": "Battery", "injectionnominalcapacity": 1600, From 95a5ac567e43b17c88a7ea952a45544ebd0cad06 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 23 Apr 2024 16:58:56 +0200 Subject: [PATCH 119/147] test(st-storage): correct unit tests for ST Storages --- .../business/areas/test_st_storage_management.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/tests/study/business/areas/test_st_storage_management.py b/tests/study/business/areas/test_st_storage_management.py index f6c221d58b..74089bc7cd 100644 --- a/tests/study/business/areas/test_st_storage_management.py +++ b/tests/study/business/areas/test_st_storage_management.py @@ -142,6 +142,7 @@ def test_get_all_storages__nominal_case( "west": [ { "id": "storage1", + "enabled": None, "group": STStorageGroup.BATTERY, "name": "Storage1", "injectionNominalCapacity": 1500.0, @@ -153,6 +154,7 @@ def test_get_all_storages__nominal_case( }, { "id": "storage2", + "enabled": None, "group": STStorageGroup.PSP_CLOSED, "name": "Storage2", "injectionNominalCapacity": 2000.0, @@ -164,6 +166,7 @@ def test_get_all_storages__nominal_case( }, { "id": "storage3", + "enabled": None, "group": STStorageGroup.PSP_CLOSED, "name": "Storage3", "injectionNominalCapacity": 1500.0, @@ -250,7 +253,7 @@ def test_get_st_storages__nominal_case( "name": "Storage1", "reservoirCapacity": 20000.0, "withdrawalNominalCapacity": 1500.0, - "enabled": True, # present with default value even if the study is in v8.6 + "enabled": None, }, { "efficiency": 0.78, @@ -262,7 +265,7 @@ def 
test_get_st_storages__nominal_case( "name": "Storage2", "reservoirCapacity": 20000.0, "withdrawalNominalCapacity": 1500.0, - "enabled": True, + "enabled": None, }, { "efficiency": 0.72, @@ -274,7 +277,7 @@ def test_get_st_storages__nominal_case( "name": "Storage3", "reservoirCapacity": 21000.0, "withdrawalNominalCapacity": 1500.0, - "enabled": True, + "enabled": None, }, ] assert actual == expected @@ -361,7 +364,7 @@ def test_get_st_storage__nominal_case( "name": "Storage1", "reservoirCapacity": 20000.0, "withdrawalNominalCapacity": 1500.0, - "enabled": True, + "enabled": None, } assert actual == expected From 893cdd43cae7a5f9e52cea886fb4fc14229a654e Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 23 Apr 2024 17:07:16 +0200 Subject: [PATCH 120/147] test(bc): correct unit tests for ST Storages --- .../model/command/test_manage_binding_constraints.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/variantstudy/model/command/test_manage_binding_constraints.py b/tests/variantstudy/model/command/test_manage_binding_constraints.py index f848883831..d79f744960 100644 --- a/tests/variantstudy/model/command/test_manage_binding_constraints.py +++ b/tests/variantstudy/model/command/test_manage_binding_constraints.py @@ -365,6 +365,8 @@ def test_revert(command_context: CommandContext): enabled=True, time_step=BindingConstraintFrequency.HOURLY, operator=BindingConstraintOperator.EQUAL, + filter_year_by_year="", + filter_synthesis="", coeffs={"a": [0.3]}, values=hourly_matrix_id, command_context=command_context, From 761bc57d4015ecb3816422dd39d46dfc1c2de827 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE <43534797+laurent-laporte-pro@users.noreply.github.com> Date: Wed, 24 Apr 2024 16:02:31 +0200 Subject: [PATCH 121/147] fix(ui-tablemode): add missing "ST Storage" in Table Mode template (#2016) --- webapp/src/services/api/studies/tableMode/constants.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/webapp/src/services/api/studies/tableMode/constants.ts b/webapp/src/services/api/studies/tableMode/constants.ts index e75e163cc3..09ccdc82ff 100644 --- a/webapp/src/services/api/studies/tableMode/constants.ts +++ b/webapp/src/services/api/studies/tableMode/constants.ts @@ -10,6 +10,7 @@ export const TABLE_MODE_TYPES = [ LINKS, THERMALS, RENEWABLES, + ST_STORAGES, BINDING_CONSTRAINTS, ] as const; From bfcd6fac05d25858a4a27c1902c5fb959717f417 Mon Sep 17 00:00:00 2001 From: Hatim Dinia <33469289+hdinia@users.noreply.github.com> Date: Mon, 29 Apr 2024 12:17:14 +0200 Subject: [PATCH 122/147] fix(ui-settings): prevent false duplicates on group form updates (#1998) --- .../App/Settings/Groups/dialog/GroupFormDialog/GroupForm.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/webapp/src/components/App/Settings/Groups/dialog/GroupFormDialog/GroupForm.tsx b/webapp/src/components/App/Settings/Groups/dialog/GroupFormDialog/GroupForm.tsx index e17dee147b..39e8e26ca4 100644 --- a/webapp/src/components/App/Settings/Groups/dialog/GroupFormDialog/GroupForm.tsx +++ b/webapp/src/components/App/Settings/Groups/dialog/GroupFormDialog/GroupForm.tsx @@ -115,6 +115,7 @@ function GroupForm(props: UseFormReturnPlus) { validateString(v, { existingValues: existingGroups, excludedValues: RESERVED_GROUP_NAMES, + editedValue: defaultValues?.name, // prevent false duplicates on update form }), })} /> From ec66be399fdf8db3abd17c06a8ba77d7a8250cc4 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE <43534797+laurent-laporte-pro@users.noreply.github.com> Date: Mon, 29 Apr 2024 14:59:09 +0200 Subject: [PATCH 123/147] 
ci(hydro): avoid SonarCloud warning about unfilled todo (#2017) --- .../model/filesystem/root/input/hydro/hydro_ini.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py index 9e07a32506..2065ea45e5 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py @@ -4,8 +4,20 @@ class InputHydroIni(IniFileNode): + # noinspection SpellCheckingInspection def __init__(self, context: ContextServer, config: FileStudyTreeConfig): - # TODO: missing "use heuristic", "follow load" and "reservoir capacity" + # The "use heuristic", "follow load" and "reservoir capacity" parameters are missing here, + # but are well taken into account in the `HydroManager` class and can be modified + # by the user in the graphical interface. + # + # They are very used in the representation of hydro. + # - "use heuristic" allows to define a reservoir management mode which consists + # in turbinating a certain fixed quantity each week. + # - "reservoir capacity" is the capacity of the reservoir in MWh. + # - "follow load" is a parameter whose activation (with others) helps to define + # the amount of water that can be turbinated for each reservoir each week. + # This amount depends on the consumption of the node on which the reservoir is located, hence the name. + sections = [ "inter-daily-breakdown", "intra-daily-modulation", From d0f0bf2a05a8170741b9eb88c08f6b1aa2489162 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Fri, 26 Apr 2024 21:19:57 +0200 Subject: [PATCH 124/147] fix(ui-tablemode): unable to edit tables with old types --- .../dialogs/TableTemplateFormDialog.tsx | 2 +- .../common/fieldEditors/ListFE/index.tsx | 8 ++-- .../api/studies/tableMode/constants.ts | 45 +++++++++++-------- webapp/src/services/utils/localStorage.ts | 14 +++++- 4 files changed, 45 insertions(+), 24 deletions(-) diff --git a/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/TableTemplateFormDialog.tsx b/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/TableTemplateFormDialog.tsx index 8517268e45..820852492a 100644 --- a/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/TableTemplateFormDialog.tsx +++ b/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/TableTemplateFormDialog.tsx @@ -75,7 +75,7 @@ function TableTemplateFormDialog(props: TableTemplateFormDialogProps) { /> { - defaultValue?: readonly TItem[]; - value?: readonly TItem[]; - options: readonly TOption[]; + defaultValue?: TItem[]; + value?: TItem[]; + options?: TOption[]; label?: string; getOptionLabel?: (option: TOption) => string; getValueLabel?: (value: TItem) => string; @@ -64,7 +64,7 @@ function ListFE(props: ListFEProps) { value, defaultValue, label, - options, + options = [], getOptionLabel = makeLabel, getValueLabel = makeLabel, optionToItem = (option: TOption) => option as unknown as TItem, diff --git a/webapp/src/services/api/studies/tableMode/constants.ts b/webapp/src/services/api/studies/tableMode/constants.ts index 09ccdc82ff..acef3d51f6 100644 --- a/webapp/src/services/api/studies/tableMode/constants.ts +++ b/webapp/src/services/api/studies/tableMode/constants.ts @@ -1,21 +1,30 @@ -const AREAS = "areas"; -const LINKS = "links"; -const THERMALS = 
"thermals"; -const RENEWABLES = "renewables"; -const ST_STORAGES = "st-storages"; -const BINDING_CONSTRAINTS = "binding-constraints"; +const AREA = "areas"; +const LINK = "links"; +const THERMAL = "thermals"; +const RENEWABLE = "renewables"; +const ST_STORAGE = "st-storages"; +const BINDING_CONSTRAINT = "binding-constraints"; export const TABLE_MODE_TYPES = [ - AREAS, - LINKS, - THERMALS, - RENEWABLES, - ST_STORAGES, - BINDING_CONSTRAINTS, + AREA, + LINK, + THERMAL, + RENEWABLE, + ST_STORAGE, + BINDING_CONSTRAINT, ] as const; +// Deprecated types (breaking change from v2.16.8) +export const TABLE_MODE_TYPES_ALIASES = { + area: AREA, + link: LINK, + cluster: THERMAL, + renewable: RENEWABLE, + "binding constraint": BINDING_CONSTRAINT, +}; + export const TABLE_MODE_COLUMNS_BY_TYPE = { - [AREAS]: [ + [AREA]: [ "nonDispatchablePower", "dispatchableHydroPower", "otherDispatchablePower", @@ -28,7 +37,7 @@ export const TABLE_MODE_COLUMNS_BY_TYPE = { // Since v8.3 "adequacyPatchMode", ], - [LINKS]: [ + [LINK]: [ "hurdlesCost", "loopFlow", "usePhaseShifter", @@ -41,7 +50,7 @@ export const TABLE_MODE_COLUMNS_BY_TYPE = { "filterSynthesis", "filterYearByYear", ], - [THERMALS]: [ + [THERMAL]: [ "group", "enabled", "unitCount", @@ -80,7 +89,7 @@ export const TABLE_MODE_COLUMNS_BY_TYPE = { "efficiency", "variableOMCost", ], - [RENEWABLES]: [ + [RENEWABLE]: [ // Since v8.1 "group", "enabled", @@ -88,7 +97,7 @@ export const TABLE_MODE_COLUMNS_BY_TYPE = { "unitCount", "nominalCapacity", ], - [ST_STORAGES]: [ + [ST_STORAGE]: [ // Since v8.6 "group", "injectionNominalCapacity", @@ -100,7 +109,7 @@ export const TABLE_MODE_COLUMNS_BY_TYPE = { // Since v8.8 "enabled", ], - [BINDING_CONSTRAINTS]: [ + [BINDING_CONSTRAINT]: [ "enabled", "timeStep", "operator", diff --git a/webapp/src/services/utils/localStorage.ts b/webapp/src/services/utils/localStorage.ts index fa5c084c56..faa230e17a 100644 --- a/webapp/src/services/utils/localStorage.ts +++ b/webapp/src/services/utils/localStorage.ts @@ -4,6 +4,7 @@ import { UserInfo } from "../../common/types"; import { TableTemplate } from "../../components/App/Singlestudy/explore/TableModeList/utils"; import { StudiesSortConf, StudiesState } from "../../redux/ducks/studies"; import { UIState } from "../../redux/ducks/ui"; +import { TABLE_MODE_TYPES_ALIASES } from "../api/studies/tableMode/constants"; export enum StorageKey { Version = "version", @@ -46,7 +47,18 @@ function getItem(key: T): TypeFromKey[T] | null { if (serializedState === null) { return null; } - return JSON.parse(serializedState); + const res = JSON.parse(serializedState); + + // Convert deprecated types to new ones (breaking change from v2.16.8) + if (key === StorageKey.StudiesModelTableModeTemplates) { + return res.map((template: Record) => ({ + ...template, + // @ts-expect-error To ignore error TS2551 + type: TABLE_MODE_TYPES_ALIASES[template.type] ?? 
template.type, + })); + } + + return res; } catch (err) { return null; } From 819e62e9cb726eb359faefff10a8d3bf176e8324 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Fri, 26 Apr 2024 20:56:31 +0200 Subject: [PATCH 125/147] feat(ui-tablemode): translate table types in add/edit modal --- webapp/public/locales/en/main.json | 8 +++++++- webapp/public/locales/fr/main.json | 8 +++++++- .../TableModeList/dialogs/TableTemplateFormDialog.tsx | 11 ++++++++++- 3 files changed, 24 insertions(+), 3 deletions(-) diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 788dec17c1..942d4630a9 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -731,5 +731,11 @@ "results.error.jobs": "Failed to retrieve study launch jobs", "results.error.outputs": "Failed to retrieve study output list", "results.noOutputs": "No outputs", - "results.question.deleteOutput": "Are you sure you want to delete the output {{outputname}}?" + "results.question.deleteOutput": "Are you sure you want to delete the output {{outputname}}?", + "tableMode.type.areas": "Areas", + "tableMode.type.links": "Links", + "tableMode.type.thermals": "Thermals", + "tableMode.type.renewables": "Renewables", + "tableMode.type.st-storages": "Short-Term Storages", + "tableMode.type.binding-constraints": "Binding Constraints" } diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index 8fc1b6c90c..5e4d02b567 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -731,5 +731,11 @@ "results.error.jobs": "Erreur lors de la récupération des tâches de lancement", "results.error.outputs": "Erreur lors de la récupération des sorties de l'étude", "results.noOutputs": "Aucune sorties", - "results.question.deleteOutput": "Êtes-vous sûr de vouloir supprimer le résultat de simulation {{outputname}} ?" 
+ "results.question.deleteOutput": "Êtes-vous sûr de vouloir supprimer le résultat de simulation {{outputname}} ?", + "tableMode.type.areas": "Zones", + "tableMode.type.links": "Liens", + "tableMode.type.thermals": "Thermiques", + "tableMode.type.renewables": "Renouvelables", + "tableMode.type.st-storages": "Stockages court terme", + "tableMode.type.binding-constraints": "Contraintes couplantes" } diff --git a/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/TableTemplateFormDialog.tsx b/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/TableTemplateFormDialog.tsx index 820852492a..df8b3606cb 100644 --- a/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/TableTemplateFormDialog.tsx +++ b/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/TableTemplateFormDialog.tsx @@ -30,6 +30,15 @@ function TableTemplateFormDialog(props: TableTemplateFormDialogProps) { [templates], ); + const typeOptions = useMemo( + () => + TABLE_MODE_TYPES.map((type) => ({ + value: type, + label: t(`tableMode.type.${type}`), + })), + [t], + ); + //////////////////////////////////////////////////////////////// // JSX //////////////////////////////////////////////////////////////// @@ -67,7 +76,7 @@ function TableTemplateFormDialog(props: TableTemplateFormDialogProps) { /> resetField("columns")} name="type" From 34b97eaf9e7afc000d73c73842841501059826ac Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Tue, 30 Apr 2024 17:17:40 +0200 Subject: [PATCH 126/147] fix(ui-tablemode): reset 'column' field when 'type' field change in create/update modal --- .../explore/TableModeList/dialogs/TableTemplateFormDialog.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/TableTemplateFormDialog.tsx b/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/TableTemplateFormDialog.tsx index df8b3606cb..0b49e7335c 100644 --- a/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/TableTemplateFormDialog.tsx +++ b/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/TableTemplateFormDialog.tsx @@ -52,7 +52,7 @@ function TableTemplateFormDialog(props: TableTemplateFormDialogProps) { onSubmit={onSubmit} onCancel={onCancel} > - {({ control, resetField, getValues }) => ( + {({ control, setValue, getValues }) => ( resetField("columns")} + onChange={() => setValue("columns", [])} name="type" control={control} /> From 920df39a2ba0f48ad7e63c89c5997e89aff54ab0 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Fri, 3 May 2024 15:36:05 +0200 Subject: [PATCH 127/147] feat(ui-storages): add parameter 'enabled' for v8.8 (#2021) --- webapp/public/locales/en/main.json | 1 + webapp/public/locales/fr/main.json | 1 + .../Modelization/Areas/Storages/Fields.tsx | 15 +++++++++++++++ .../explore/Modelization/Areas/Storages/index.tsx | 10 ++++++++-- .../explore/Modelization/Areas/Storages/utils.ts | 2 ++ 5 files changed, 27 insertions(+), 2 deletions(-) diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 788dec17c1..5710e7cb10 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -71,6 +71,7 @@ "global.undo": "Undo", "global.redo": "Redo", "global.total": "Total", + "global.enabled": "Enabled", "global.time.hourly": "Hourly", "global.time.daily": "Daily", "global.time.weekly": "Weekly", diff --git 
a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index 8fc1b6c90c..b8982996f0 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -71,6 +71,7 @@ "global.undo": "Annuler", "global.redo": "Rétablir", "global.total": "Total", + "global.enabled": "Activé", "global.time.hourly": "Horaire", "global.time.daily": "Journalier", "global.time.weekly": "Hebdomadaire", diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Fields.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Fields.tsx index 9d6935b3fe..b692466713 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Fields.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Fields.tsx @@ -7,10 +7,14 @@ import SwitchFE from "../../../../../../common/fieldEditors/SwitchFE"; import Fieldset from "../../../../../../common/Fieldset"; import { useFormContextPlus } from "../../../../../../common/Form"; import { STORAGE_GROUPS, Storage } from "./utils"; +import { useOutletContext } from "react-router"; +import { StudyMetadata } from "../../../../../../../common/types"; function Fields() { const [t] = useTranslation(); + const { study } = useOutletContext<{ study: StudyMetadata }>(); const { control } = useFormContextPlus(); + const studyVersion = parseInt(study.version, 10); //////////////////////////////////////////////////////////////// // JSX @@ -33,6 +37,17 @@ function Fields() { alignSelf: "center", }} /> + {studyVersion >= 880 && ( + + )} getStorages(study.id, areaId), @@ -46,6 +47,11 @@ function Storages() { totals; return [ + studyVersion >= 880 && + columnHelper.accessor("enabled", { + header: t("global.enabled"), + Cell: BooleanCell, + }), columnHelper.accessor("injectionNominalCapacity", { header: t("study.modelization.storages.injectionNominalCapacity"), Header: ({ column }) => ( @@ -130,8 +136,8 @@ function Storages() { filterVariant: "checkbox", Cell: BooleanCell, }), - ]; - }, [t, totals]); + ].filter(Boolean); + }, [studyVersion, t, totals]); //////////////////////////////////////////////////////////////// // Event handlers diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts index 48466da646..04864fdc97 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts @@ -34,6 +34,8 @@ export interface Storage { efficiency: number; initialLevel: number; initialLevelOptim: boolean; + // Since v8.8 + enabled: boolean; } //////////////////////////////////////////////////////////////// From 63ec8d7bddd618565717878e014ead831cef34c3 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Fri, 3 May 2024 15:54:41 +0200 Subject: [PATCH 128/147] refactor(ui-api): update thematic trimming API --- .../dialogs/ThematicTrimmingDialog/index.tsx | 19 +-- .../studies/config/thematicTrimming/index.ts | 25 ++++ .../studies/config/thematicTrimming/types.ts | 111 ++++++++++++++++++ 3 files changed, 146 insertions(+), 9 deletions(-) create mode 100644 webapp/src/services/api/studies/config/thematicTrimming/index.ts create mode 100644 webapp/src/services/api/studies/config/thematicTrimming/types.ts diff --git 
a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/index.tsx b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/index.tsx index 01a64cfc79..3988a13525 100644 --- a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/index.tsx @@ -11,12 +11,12 @@ import { import SearchFE from "../../../../../../../common/fieldEditors/SearchFE"; import { isSearchMatching } from "../../../../../../../../utils/stringUtils"; import FormDialog from "../../../../../../../common/dialogs/FormDialog"; +import { getFieldNames } from "./utils"; +import type { ThematicTrimmingConfig } from "../../../../../../../../services/api/studies/config/thematicTrimming/types"; import { - getFieldNames, - getThematicTrimmingFormFields, + getThematicTrimmingConfig, setThematicTrimmingConfig, - ThematicTrimmingFormFields, -} from "./utils"; +} from "../../../../../../../../services/api/studies/config/thematicTrimming"; interface Props { study: StudyMetadata; @@ -43,10 +43,11 @@ function ThematicTrimmingDialog(props: Props) { }); }; - const handleSubmit = ( - data: SubmitHandlerPlus, - ) => { - return setThematicTrimmingConfig(study.id, data.values); + const handleSubmit = (data: SubmitHandlerPlus) => { + return setThematicTrimmingConfig({ + studyId: study.id, + config: data.values, + }); }; //////////////////////////////////////////////////////////////// @@ -59,7 +60,7 @@ function ThematicTrimmingDialog(props: Props) { open={open} title="Thematic Trimming" config={{ - defaultValues: () => getThematicTrimmingFormFields(study.id), + defaultValues: () => getThematicTrimmingConfig({ studyId: study.id }), }} onSubmit={handleSubmit} onCancel={onClose} diff --git a/webapp/src/services/api/studies/config/thematicTrimming/index.ts b/webapp/src/services/api/studies/config/thematicTrimming/index.ts new file mode 100644 index 0000000000..0228eb98fc --- /dev/null +++ b/webapp/src/services/api/studies/config/thematicTrimming/index.ts @@ -0,0 +1,25 @@ +import type { + GetThematicTrimmingConfigParams, + SetThematicTrimmingConfigParams, + ThematicTrimmingConfig, +} from "./types"; +import client from "../../../client"; +import { format } from "../../../../../utils/stringUtils"; + +const URL = "/v1/studies/{studyId}/config/thematictrimming/form"; + +export async function getThematicTrimmingConfig({ + studyId, +}: GetThematicTrimmingConfigParams) { + const url = format(URL, { studyId }); + const res = await client.get(url); + return res.data; +} + +export async function setThematicTrimmingConfig({ + studyId, + config, +}: SetThematicTrimmingConfigParams) { + const url = format(URL, { studyId }); + await client.put(url, config); +} diff --git a/webapp/src/services/api/studies/config/thematicTrimming/types.ts b/webapp/src/services/api/studies/config/thematicTrimming/types.ts new file mode 100644 index 0000000000..1137a9536f --- /dev/null +++ b/webapp/src/services/api/studies/config/thematicTrimming/types.ts @@ -0,0 +1,111 @@ +import { StudyMetadata } from "../../../../../common/types"; + +export interface ThematicTrimmingConfig { + ovCost: boolean; + opCost: boolean; + mrgPrice: boolean; + co2Emis: boolean; + dtgByPlant: boolean; + balance: boolean; + rowBal: boolean; + psp: boolean; + miscNdg: boolean; + load: boolean; + hRor: boolean; + wind: boolean; + solar: boolean; + nuclear: boolean; + 
lignite: boolean; + coal: boolean; + gas: boolean; + oil: boolean; + mixFuel: boolean; + miscDtg: boolean; + hStor: boolean; + hPump: boolean; + hLev: boolean; + hInfl: boolean; + hOvfl: boolean; + hVal: boolean; + hCost: boolean; + unspEnrg: boolean; + spilEnrg: boolean; + lold: boolean; + lolp: boolean; + avlDtg: boolean; + dtgMrg: boolean; + maxMrg: boolean; + npCost: boolean; + npCostByPlant: boolean; + nodu: boolean; + noduByPlant: boolean; + flowLin: boolean; + ucapLin: boolean; + loopFlow: boolean; + flowQuad: boolean; + congFeeAlg: boolean; + congFeeAbs: boolean; + margCost: boolean; + congProbPlus: boolean; + congProbMinus: boolean; + hurdleCost: boolean; + // Since v8.1 + resGenerationByPlant?: boolean; + miscDtg2?: boolean; + miscDtg3?: boolean; + miscDtg4?: boolean; + windOffshore?: boolean; + windOnshore?: boolean; + solarConcrt?: boolean; + solarPv?: boolean; + solarRooft?: boolean; + renw1?: boolean; + renw2?: boolean; + renw3?: boolean; + renw4?: boolean; + // Since v8.3 + dens?: boolean; + profitByPlant?: boolean; + // Since v8.6 + stsInjByPlant?: boolean; + stsWithdrawalByPlant?: boolean; + stsLvlByPlant?: boolean; + pspOpenInjection?: boolean; + pspOpenWithdrawal?: boolean; + pspOpenLevel?: boolean; + pspClosedInjection?: boolean; + pspClosedWithdrawal?: boolean; + pspClosedLevel?: boolean; + pondageInjection?: boolean; + pondageWithdrawal?: boolean; + pondageLevel?: boolean; + batteryInjection?: boolean; + batteryWithdrawal?: boolean; + batteryLevel?: boolean; + other1Injection?: boolean; + other1Withdrawal?: boolean; + other1Level?: boolean; + other2Injection?: boolean; + other2Withdrawal?: boolean; + other2Level?: boolean; + other3Injection?: boolean; + other3Withdrawal?: boolean; + other3Level?: boolean; + other4Injection?: boolean; + other4Withdrawal?: boolean; + other4Level?: boolean; + other5Injection?: boolean; + other5Withdrawal?: boolean; + other5Level?: boolean; + // Since v8.8 + stsCashflowByCluster?: boolean; +} + +export interface GetThematicTrimmingConfigParams { + studyId: StudyMetadata["id"]; +} + +export interface SetThematicTrimmingConfigParams { + studyId: StudyMetadata["id"]; + config: ThematicTrimmingConfig; +} From 470eddec484be6874db9c1d29dae3fb599c6ed47 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Fri, 3 May 2024 15:59:00 +0200 Subject: [PATCH 129/147] fix(ui-common): block FormDialog closing when submitting --- webapp/src/components/common/dialogs/FormDialog.tsx | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/webapp/src/components/common/dialogs/FormDialog.tsx b/webapp/src/components/common/dialogs/FormDialog.tsx index 8f0ab7673f..96ea4ac12f 100644 --- a/webapp/src/components/common/dialogs/FormDialog.tsx +++ b/webapp/src/components/common/dialogs/FormDialog.tsx @@ -83,8 +83,10 @@ function FormDialog< //////////////////////////////////////////////////////////////// const handleClose: FormDialogProps["onClose"] = (...args) => { - onCancel(); - onClose?.(...args); + if (!isSubmitting) { + onCancel(); + onClose?.(...args); + } }; //////////////////////////////////////////////////////////////// From d011168aee620157fa4d914a47939f0a780e72ef Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Fri, 3 May 2024 17:43:08 +0200 Subject: [PATCH 130/147] feat(ui-thematictrimming): add groups and sort by alphabetical order --- webapp/public/locales/en/main.json | 10 + webapp/public/locales/fr/main.json | 10 + 
.../dialogs/ThematicTrimmingDialog/index.tsx | 133 +++++-- .../dialogs/ThematicTrimmingDialog/utils.ts | 362 +++++++----------- 4 files changed, 256 insertions(+), 259 deletions(-) diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 5710e7cb10..78d1ae6300 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -334,6 +334,16 @@ "study.configuration.general.mcScenarioPlaylist.weight": "Weight", "study.configuration.general.geographicTrimming": "Geographic trimming", "study.configuration.general.thematicTrimming": "Thematic trimming", + "study.configuration.general.thematicTrimming.action.enableAll": "Enable all", + "study.configuration.general.thematicTrimming.action.disableAll": "Disable all", + "study.configuration.general.thematicTrimming.action.reverse": "Reverse", + "study.configuration.general.thematicTrimming.action.collapseAll": "Collapse all", + "study.configuration.general.thematicTrimming.group.general": "General", + "study.configuration.general.thematicTrimming.group.generationHydro": "Generation / Hydro", + "study.configuration.general.thematicTrimming.group.generationRenewables": "Generation / Renewables", + "study.configuration.general.thematicTrimming.group.generationStStorages": "Generation / Short-Term Storages", + "study.configuration.general.thematicTrimming.group.generationThermals": "Generation / Thermals", + "study.configuration.general.thematicTrimming.group.links": "Links", "study.configuration.general.filtering": "Filtering", "study.configuration.optimization.legend.general": "General", "study.configuration.optimization.legend.links": "Links", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index b8982996f0..b96a4d1735 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -334,6 +334,16 @@ "study.configuration.general.mcScenarioPlaylist.weight": "Weight", "study.configuration.general.geographicTrimming": "Geographic trimming", "study.configuration.general.thematicTrimming": "Thematic trimming", + "study.configuration.general.thematicTrimming.action.enableAll": "Activer tout", + "study.configuration.general.thematicTrimming.action.disableAll": "Désactiver tout", + "study.configuration.general.thematicTrimming.action.reverse": "Inverser", + "study.configuration.general.thematicTrimming.action.collapseAll": "Réduire tout", + "study.configuration.general.thematicTrimming.group.general": "Général", + "study.configuration.general.thematicTrimming.group.generationHydro": "Génération / Hydro", + "study.configuration.general.thematicTrimming.group.generationRenewables": "Génération / Renouvelables", + "study.configuration.general.thematicTrimming.group.generationStStorages": "Génération / Stockages court terme", + "study.configuration.general.thematicTrimming.group.generationThermals": "Génération / Thermiques", + "study.configuration.general.thematicTrimming.group.links": "Liens", "study.configuration.general.filtering": "Filtering", "study.configuration.optimization.legend.general": "Générale", "study.configuration.optimization.legend.links": "Liens", diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/index.tsx b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/index.tsx index 3988a13525..182aad6bcc 100644 --- 
a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/index.tsx @@ -1,4 +1,12 @@ -import { Box, Button, Divider, Unstable_Grid2 as Grid } from "@mui/material"; +import { + Accordion, + AccordionDetails, + AccordionSummary, + Button, + Divider, + Unstable_Grid2 as Grid, +} from "@mui/material"; +import ExpandMoreIcon from "@mui/icons-material/ExpandMore"; import * as R from "ramda"; import * as RA from "ramda-adjunct"; import { useState } from "react"; @@ -11,12 +19,18 @@ import { import SearchFE from "../../../../../../../common/fieldEditors/SearchFE"; import { isSearchMatching } from "../../../../../../../../utils/stringUtils"; import FormDialog from "../../../../../../../common/dialogs/FormDialog"; -import { getFieldNames } from "./utils"; +import { + THEMATIC_TRIMMING_GROUPS, + getFieldLabelsForGroup, + type ThematicTrimmingGroup, +} from "./utils"; import type { ThematicTrimmingConfig } from "../../../../../../../../services/api/studies/config/thematicTrimming/types"; import { getThematicTrimmingConfig, setThematicTrimmingConfig, } from "../../../../../../../../services/api/studies/config/thematicTrimming"; +import { useTranslation } from "react-i18next"; +import Stack from "@mui/material/Stack"; interface Props { study: StudyMetadata; @@ -26,21 +40,34 @@ interface Props { function ThematicTrimmingDialog(props: Props) { const { study, open, onClose } = props; + const { t } = useTranslation(); const [search, setSearch] = useState(""); + const [expanded, setExpanded] = useState(() => + THEMATIC_TRIMMING_GROUPS.reduce( + (acc, group) => ({ ...acc, [group]: true }), + {} as Partial>, + ), + ); + + const commonBtnProps = { + color: "secondary", + // Disable all buttons when search is active to remove confusion + // about which fields are being affected by the action (search or all) + disabled: !!search, + } as const; + //////////////////////////////////////////////////////////////// // Event Handlers //////////////////////////////////////////////////////////////// const handleUpdateConfig = - (api: UseFormReturnPlus, fn: RA.Pred) => () => { - setSearch(""); - - const valuesArr = R.toPairs(api.getValues()).filter(Boolean); - - valuesArr.forEach(([key, val]) => { - api.setValue(key, fn(val)); - }); + (api: UseFormReturnPlus, fn: RA.Pred) => () => { + R.toPairs(api.getValues()) + .filter(Boolean) + .forEach(([key, val]) => { + api.setValue(key, fn(val)); + }); }; const handleSubmit = (data: SubmitHandlerPlus) => { @@ -80,49 +107,83 @@ function ThematicTrimmingDialog(props: Props) { > {(api) => ( <> - + setSearch("")} size="small" /> - - - - - - - - {getFieldNames(api.getValues()) + + + + + {THEMATIC_TRIMMING_GROUPS.map((group) => { + const fields = getFieldLabelsForGroup(api.getValues(), group) .filter(([, label]) => isSearchMatching(search, label)) .map(([name, label]) => ( - ))} - + )); + + return fields.length > 0 ? 
( + { + setExpanded((prev) => ({ ...prev, [group]: isExpanded })); + }} + disableGutters + > + }> + {t( + `study.configuration.general.thematicTrimming.group.${group}`, + )} + + + + {fields} + + + + ) : null; + })} )} diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts index d5059668d3..61b29afa09 100644 --- a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts +++ b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts @@ -1,230 +1,146 @@ import * as R from "ramda"; -import { StudyMetadata } from "../../../../../../../../common/types"; -import client from "../../../../../../../../services/api/client"; +import { ThematicTrimmingConfig } from "../../../../../../../../services/api/studies/config/thematicTrimming/types"; +import { O } from "ts-toolbelt"; -export interface ThematicTrimmingFormFields { - ovCost: boolean; - opCost: boolean; - mrgPrice: boolean; - co2Emis: boolean; - dtgByPlant: boolean; - balance: boolean; - rowBal: boolean; - psp: boolean; - miscNdg: boolean; - load: boolean; - hRor: boolean; - wind: boolean; - solar: boolean; - nuclear: boolean; - lignite: boolean; - coal: boolean; - gas: boolean; - oil: boolean; - mixFuel: boolean; - miscDtg: boolean; - hStor: boolean; - hPump: boolean; - hLev: boolean; - hInfl: boolean; - hOvfl: boolean; - hVal: boolean; - hCost: boolean; - unspEnrg: boolean; - spilEnrg: boolean; - lold: boolean; - lolp: boolean; - avlDtg: boolean; - dtgMrg: boolean; - maxMrg: boolean; - npCost: boolean; - npCostByPlant: boolean; - nodu: boolean; - noduByPlant: boolean; - flowLin: boolean; - ucapLin: boolean; - loopFlow: boolean; - flowQuad: boolean; - congFeeAlg: boolean; - congFeeAbs: boolean; - margCost: boolean; - congProbPlus: boolean; - congProbMinus: boolean; - hurdleCost: boolean; - // Study version >= 810 - resGenerationByPlant?: boolean; - miscDtg2?: boolean; - miscDtg3?: boolean; - miscDtg4?: boolean; - windOffshore?: boolean; - windOnshore?: boolean; - solarConcrt?: boolean; - solarPv?: boolean; - solarRooft?: boolean; - renw1?: boolean; - renw2?: boolean; - renw3?: boolean; - renw4?: boolean; - // Study version >= 830 - dens?: boolean; - profitByPlant?: boolean; - // Study version >= 860 - stsInjByPlant?: boolean; - stsWithdrawalByPlant?: boolean; - stsLvlByPlant?: boolean; - pspOpenInjection?: boolean; - pspOpenWithdrawal?: boolean; - pspOpenLevel?: boolean; - pspClosedInjection?: boolean; - pspClosedWithdrawal?: boolean; - pspClosedLevel?: boolean; - pondageInjection?: boolean; - pondageWithdrawal?: boolean; - pondageLevel?: boolean; - batteryInjection?: boolean; - batteryWithdrawal?: boolean; - batteryLevel?: boolean; - other1Injection?: boolean; - other1Withdrawal?: boolean; - other1Level?: boolean; - other2Injection?: boolean; - other2Withdrawal?: boolean; - other2Level?: boolean; - other3Injection?: boolean; - other3Withdrawal?: boolean; - other3Level?: boolean; - other4Injection?: boolean; - other4Withdrawal?: boolean; - other4Level?: boolean; - other5Injection?: boolean; - other5Withdrawal?: boolean; - other5Level?: boolean; - // Study version >= 880 - stsCashflowByCluster?: boolean; -} +export const THEMATIC_TRIMMING_GROUPS = [ + "general", + "generationHydro", + "generationRenewables", + "generationStStorages", + "generationThermals", + "links", +] as 
const; + +export type ThematicTrimmingGroup = (typeof THEMATIC_TRIMMING_GROUPS)[number]; -const keysMap: Record = { - ovCost: "OV. COST", - opCost: "OP. COST", - mrgPrice: "MRG. PRICE", - co2Emis: "CO2 EMIS.", - dtgByPlant: "DTG BY PLANT", - balance: "BALANCE", - rowBal: "ROW BAL.", - psp: "PSP", - miscNdg: "MISC. NDG", - load: "LOAD", - hRor: "H. ROR", - wind: "WIND", - solar: "SOLAR", - nuclear: "NUCLEAR", - lignite: "LIGNITE", - coal: "COAL", - gas: "GAS", - oil: "OIL", - mixFuel: "MIX. FUEL", - miscDtg: "MISC. DTG", - hStor: "H. STOR", - hPump: "H. PUMP", - hLev: "H. LEV", - hInfl: "H. INFL", - hOvfl: "H. OVFL", - hVal: "H. VAL", - hCost: "H. COST", - unspEnrg: "UNSP. ENRG", - spilEnrg: "SPIL. ENRG", - lold: "LOLD", - lolp: "LOLP", - avlDtg: "AVL DTG", - dtgMrg: "DTG MRG", - maxMrg: "MAX MRG", - npCost: "NP COST", - npCostByPlant: "NP COST BY PLANT", - nodu: "NODU", - noduByPlant: "NODU BY PLANT", - flowLin: "FLOW LIN.", - ucapLin: "UCAP LIN.", - loopFlow: "LOOP FLOW", - flowQuad: "FLOW QUAD.", - congFeeAlg: "CONG. FEE (ALG.)", - congFeeAbs: "CONG. FEE (ABS.)", - margCost: "MARG. COST", - congProbPlus: "CONG. PROB +", - congProbMinus: "CONG. PROB -", - hurdleCost: "HURDLE COST", - // Study version >= 810 - resGenerationByPlant: "RES GENERATION BY PLANT", - miscDtg2: "MISC. DTG 2", - miscDtg3: "MISC. DTG 3", - miscDtg4: "MISC. DTG 4", - windOffshore: "WIND OFFSHORE", - windOnshore: "WIND ONSHORE", - solarConcrt: "SOLAR CONCRT.", - solarPv: "SOLAR PV", - solarRooft: "SOLAR ROOFT", - renw1: "RENW. 1", - renw2: "RENW. 2", - renw3: "RENW. 3", - renw4: "RENW. 4", - // Study version >= 830 - dens: "DENS", - profitByPlant: "PROFIT BY PLANT", - // Study version >= 860 - stsInjByPlant: "STS INJ BY PLANT", - stsWithdrawalByPlant: "STS WITHDRAWAL BY PLANT", - stsLvlByPlant: "STS LVL BY PLANT", - pspOpenInjection: "PSP OPEN INJECTION", - pspOpenWithdrawal: "PSP OPEN WITHDRAWAL", - pspOpenLevel: "PSP OPEN LEVEL", - pspClosedInjection: "PSP CLOSED INJECTION", - pspClosedWithdrawal: "PSP CLOSED WITHDRAWAL", - pspClosedLevel: "PSP CLOSED LEVEL", - pondageInjection: "PONDAGE INJECTION", - pondageWithdrawal: "PONDAGE WITHDRAWAL", - pondageLevel: "PONDAGE LEVEL", - batteryInjection: "BATTERY INJECTION", - batteryWithdrawal: "BATTERY WITHDRAWAL", - batteryLevel: "BATTERY LEVEL", - other1Injection: "OTHER1 INJECTION", - other1Withdrawal: "OTHER1 WITHDRAWAL", - other1Level: "OTHER1 LEVEL", - other2Injection: "OTHER2 INJECTION", - other2Withdrawal: "OTHER2 WITHDRAWAL", - other2Level: "OTHER2 LEVEL", - other3Injection: "OTHER3 INJECTION", - other3Withdrawal: "OTHER3 WITHDRAWAL", - other3Level: "OTHER3 LEVEL", - other4Injection: "OTHER4 INJECTION", - other4Withdrawal: "OTHER4 WITHDRAWAL", - other4Level: "OTHER4 LEVEL", - other5Injection: "OTHER5 INJECTION", - other5Withdrawal: "OTHER5 WITHDRAWAL", - other5Level: "OTHER5 LEVEL", - // Study version >= 880 - stsCashflowByCluster: "STS CASHFLOW BY CLUSTER", +const fieldLabelsByGroup: Record< + ThematicTrimmingGroup, + Partial> +> = { + general: { + balance: "BALANCE", + dens: "DENS", + load: "LOAD", + lold: "LOLD", + lolp: "LOLP", + miscNdg: "MISC. NDG", + mrgPrice: "MRG. PRICE", + opCost: "OP. COST", + ovCost: "OV. COST", + rowBal: "ROW BAL.", + spilEnrg: "SPIL. ENRG", + unspEnrg: "UNSP. ENRG", + }, + generationHydro: { + hCost: "H. COST", + hInfl: "H. INFL", + hLev: "H. LEV", + hOvfl: "H. OVFL", + hPump: "H. PUMP", + hRor: "H. ROR", + hStor: "H. STOR", + hVal: "H. VAL", + psp: "PSP", + }, + generationRenewables: { + renw1: "RENW. 1", + renw2: "RENW. 
2", + renw3: "RENW. 3", + renw4: "RENW. 4", + resGenerationByPlant: "RES GENERATION BY PLANT", + solar: "SOLAR", + solarConcrt: "SOLAR CONCRT.", + solarPv: "SOLAR PV", + solarRooft: "SOLAR ROOFT", + wind: "WIND", + windOffshore: "WIND OFFSHORE", + windOnshore: "WIND ONSHORE", + }, + generationStStorages: { + batteryInjection: "BATTERY INJECTION", + batteryLevel: "BATTERY LEVEL", + batteryWithdrawal: "BATTERY WITHDRAWAL", + other1Injection: "OTHER1 INJECTION", + other1Level: "OTHER1 LEVEL", + other1Withdrawal: "OTHER1 WITHDRAWAL", + other2Injection: "OTHER2 INJECTION", + other2Level: "OTHER2 LEVEL", + other2Withdrawal: "OTHER2 WITHDRAWAL", + other3Injection: "OTHER3 INJECTION", + other3Level: "OTHER3 LEVEL", + other3Withdrawal: "OTHER3 WITHDRAWAL", + other4Injection: "OTHER4 INJECTION", + other4Level: "OTHER4 LEVEL", + other4Withdrawal: "OTHER4 WITHDRAWAL", + other5Injection: "OTHER5 INJECTION", + other5Level: "OTHER5 LEVEL", + other5Withdrawal: "OTHER5 WITHDRAWAL", + pondageInjection: "PONDAGE INJECTION", + pondageLevel: "PONDAGE LEVEL", + pondageWithdrawal: "PONDAGE WITHDRAWAL", + pspClosedInjection: "PSP CLOSED INJECTION", + pspClosedLevel: "PSP CLOSED LEVEL", + pspClosedWithdrawal: "PSP CLOSED WITHDRAWAL", + pspOpenInjection: "PSP OPEN INJECTION", + pspOpenLevel: "PSP OPEN LEVEL", + pspOpenWithdrawal: "PSP OPEN WITHDRAWAL", + stsCashflowByCluster: "STS CASHFLOW BY CLUSTER", + stsInjByPlant: "STS INJ BY PLANT", + stsLvlByPlant: "STS LVL BY PLANT", + stsWithdrawalByPlant: "STS WITHDRAWAL BY PLANT", + }, + generationThermals: { + avlDtg: "AVL DTG", + co2Emis: "CO2 EMIS.", + coal: "COAL", + dtgByPlant: "DTG BY PLANT", + dtgMrg: "DTG MRG", + gas: "GAS", + lignite: "LIGNITE", + maxMrg: "MAX MRG", + miscDtg: "MISC. DTG", + miscDtg2: "MISC. DTG 2", + miscDtg3: "MISC. DTG 3", + miscDtg4: "MISC. DTG 4", + mixFuel: "MIX. FUEL", + nodu: "NODU", + noduByPlant: "NODU BY PLANT", + npCost: "NP COST", + npCostByPlant: "NP COST BY PLANT", + nuclear: "NUCLEAR", + oil: "OIL", + profitByPlant: "PROFIT BY PLANT", + }, + links: { + congFeeAbs: "CONG. FEE (ABS.)", + congFeeAlg: "CONG. FEE (ALG.)", + congProbMinus: "CONG. PROB -", + congProbPlus: "CONG. PROB +", + flowLin: "FLOW LIN.", + flowQuad: "FLOW QUAD.", + hurdleCost: "HURDLE COST", + loopFlow: "LOOP FLOW", + margCost: "MARG. COST", + ucapLin: "UCAP LIN.", + }, }; -// Allow to support all study versions by using directly the server config -export function getFieldNames( - fields: ThematicTrimmingFormFields, -): Array<[keyof ThematicTrimmingFormFields, string]> { - return R.toPairs(R.pick(R.keys(fields), keysMap)); -} +/** + * Get thematic trimming field names and labels from specified config and group. + * + * Allows to support all study versions by only returning the fields that are present + * in the server response. + * + * @param config - Thematic trimming config. + * @param group - Thematic trimming form group. + * @returns Field names and labels in tuple format. 
+ */ +export function getFieldLabelsForGroup( + config: ThematicTrimmingConfig, + group: ThematicTrimmingGroup, +) { + const labelsByName = R.pick(R.keys(config), fieldLabelsByGroup[group]); + const pairs = R.toPairs(labelsByName).filter(Boolean); -function makeRequestURL(studyId: StudyMetadata["id"]): string { - return `/v1/studies/${studyId}/config/thematictrimming/form`; + return R.sortBy(R.propOr("", "1"), pairs); } - -export const getThematicTrimmingFormFields = async ( - studyId: StudyMetadata["id"], -): Promise => { - const res = await client.get(makeRequestURL(studyId)); - return res.data; -}; - -export const setThematicTrimmingConfig = async ( - studyId: StudyMetadata["id"], - config: ThematicTrimmingFormFields, -): Promise => { - await client.put(makeRequestURL(studyId), config); -}; From 860afe7c7adf4c784e95deb506caacc7597de3d6 Mon Sep 17 00:00:00 2001 From: hatim dinia Date: Fri, 3 May 2024 18:54:08 +0200 Subject: [PATCH 131/147] feat(ui-thermal): add v8.7 thermal cluster fields --- webapp/public/locales/en/main.json | 9 ++- webapp/public/locales/fr/main.json | 9 ++- .../Modelization/Areas/Thermal/Fields.tsx | 57 ++++++++++++++++--- .../Modelization/Areas/Thermal/utils.ts | 10 +++- 4 files changed, 71 insertions(+), 14 deletions(-) diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 78d1ae6300..6c18fe6d7d 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -202,7 +202,7 @@ "settings.error.groupRolesSave": "Role(s) for group '{{0}}' not saved", "settings.error.tokenSave": "'{{0}}' token not saved", "settings.error.updateMaintenance": "Maintenance mode not updated", - "settings.user.form.confirmPassword":"Confirm password", + "settings.user.form.confirmPassword": "Confirm password", "settings.user.form.error.passwordMismatch": "Passwords do not match", "launcher.additionalModes": "Additional modes", "launcher.autoUnzip": "Automatically unzip", @@ -487,10 +487,13 @@ "study.modelization.clusters.thermal.op5": "Other pollutant 5 (t/MWh)", "study.modelization.clusters.operatingCosts": "Operating costs", "study.modelization.clusters.marginalCost": "Marginal cost (€/MWh)", - "study.modelization.clusters.fixedCost": "Fixed costs (€/h)", + "study.modelization.clusters.fixedCost": "Fixed O&M costs (€/h)", "study.modelization.clusters.startupCost": "Startup cost (€)", "study.modelization.clusters.marketBidCost": "Market bid cost (€/MWh)", - "study.modelization.clusters.spreadCost": "Spread cost (€/MWh)", + "study.modelization.clusters.spreadCost": "Random spread (€/MWh)", + "study.modelization.clusters.variableOMCost": "Variable O&M costs (€/MWh)", + "study.modelization.clusters.costGeneration": "TS Costs", + "study.modelization.clusters.efficiency": "Efficiency (%)", "study.modelization.clusters.timeSeriesGen": "Time-Series generation", "study.modelization.clusters.genTs": "Generate Time-Series", "study.modelization.clusters.volatilityForced": "Volatility forced", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index b96a4d1735..a9cb4027e8 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -486,11 +486,14 @@ "study.modelization.clusters.thermal.op4": "Autre polluant 4 (t/MWh)", "study.modelization.clusters.thermal.op5": "Autre polluant 5 (t/MWh)", "study.modelization.clusters.operatingCosts": "Coûts d'exploitation", - "study.modelization.clusters.marginalCost": "Coûts marginaux (€/MWh)", - "study.modelization.clusters.fixedCost": 
"Coûts fixes (€/h)", + "study.modelization.clusters.marginalCost": "Coût marginal (€/MWh)", + "study.modelization.clusters.fixedCost": "Coûts fixes O&M (€/h)", "study.modelization.clusters.startupCost": "Coûts de démarrage (€)", "study.modelization.clusters.marketBidCost": "Offre de marché (€/MWh)", - "study.modelization.clusters.spreadCost": "Spread (€/MWh)", + "study.modelization.clusters.spreadCost": "Random Spread (€/MWh)", + "study.modelization.clusters.variableOMCost": "Coûts variables O&M (€/MWh)", + "study.modelization.clusters.costGeneration": "TS Costs", + "study.modelization.clusters.efficiency": "Rendement (%)", "study.modelization.clusters.timeSeriesGen": "Génération des Séries temporelles", "study.modelization.clusters.genTs": "Générer des Séries temporelles", "study.modelization.clusters.volatilityForced": "Volatilité forcée", diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx index ec5d6fc632..902c9091a2 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx @@ -1,6 +1,7 @@ import { useTranslation } from "react-i18next"; import { useOutletContext } from "react-router"; import { StudyMetadata } from "../../../../../../../common/types"; +import Box from "@mui/material/Box"; import NumberFE from "../../../../../../common/fieldEditors/NumberFE"; import SelectFE from "../../../../../../common/fieldEditors/SelectFE"; import StringFE from "../../../../../../common/fieldEditors/StringFE"; @@ -8,6 +9,7 @@ import SwitchFE from "../../../../../../common/fieldEditors/SwitchFE"; import Fieldset from "../../../../../../common/Fieldset"; import { useFormContextPlus } from "../../../../../../common/Form"; import { + COST_GENERATION_OPTIONS, THERMAL_GROUPS, THERMAL_POLLUTANTS, ThermalCluster, @@ -17,16 +19,18 @@ import { function Fields() { const [t] = useTranslation(); - const { control } = useFormContextPlus(); + const { control, watch } = useFormContextPlus(); const { study } = useOutletContext<{ study: StudyMetadata }>(); const studyVersion = Number(study.version); + const isTSCosts = watch("costGeneration") === "useCostTimeseries"; //////////////////////////////////////////////////////////////// // JSX //////////////////////////////////////////////////////////////// return ( - <> + // TODO: remove the margin reset after updating MUI Theme. +
+ + + +
- + ); } diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts index 8d5836a4e0..f6da40a698 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts @@ -49,6 +49,11 @@ export const TS_GENERATION_OPTIONS = [ export const TS_LAW_OPTIONS = ["geometric", "uniform"] as const; +export const COST_GENERATION_OPTIONS = [ + "SetManually", + "useCostTimeseries", +] as const; + //////////////////////////////////////////////////////////////// // Types //////////////////////////////////////////////////////////////// @@ -57,7 +62,7 @@ export type ThermalGroup = (typeof THERMAL_GROUPS)[number]; type LocalTSGenerationBehavior = (typeof TS_GENERATION_OPTIONS)[number]; type TimeSeriesLawOption = (typeof TS_LAW_OPTIONS)[number]; - +type CostGeneration = (typeof COST_GENERATION_OPTIONS)[number]; type ThermalPollutants = { [K in (typeof THERMAL_POLLUTANTS)[number]]: number; }; @@ -84,6 +89,9 @@ export interface ThermalCluster extends ThermalPollutants { volatilityPlanned: number; lawForced: TimeSeriesLawOption; lawPlanned: TimeSeriesLawOption; + costGeneration: CostGeneration; + efficiency: number; + variableOMCost: number; } export type ThermalClusterWithCapacity = ClusterWithCapacity; From 365ef4d3845807f0f4384accbeb61889f005689d Mon Sep 17 00:00:00 2001 From: hatim dinia Date: Thu, 11 Apr 2024 16:23:49 +0200 Subject: [PATCH 132/147] feat(ui-thermal): add `validateNumber` helper --- webapp/public/locales/en/main.json | 5 +- webapp/public/locales/fr/main.json | 5 +- .../Modelization/Areas/Thermal/Fields.tsx | 97 ++++--------------- .../Modelization/Areas/Thermal/Matrix.tsx | 2 + webapp/src/utils/validationUtils.ts | 31 ++++++ 5 files changed, 57 insertions(+), 83 deletions(-) diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 6c18fe6d7d..8f6154f733 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -126,6 +126,7 @@ "form.field.minLength": "{{0}} character(s) minimum", "form.field.minValue": "The minimum value is {{0}}", "form.field.maxValue": "The maximum value is {{0}}", + "form.field.invalidNumber": "Invalid number", "form.field.notAllowedValue": "Not allowed value", "form.field.specialChars": "Special characters allowed: {{0}}", "form.field.specialCharsNotAllowed": "Special characters are not allowed", @@ -491,8 +492,8 @@ "study.modelization.clusters.startupCost": "Startup cost (€)", "study.modelization.clusters.marketBidCost": "Market bid cost (€/MWh)", "study.modelization.clusters.spreadCost": "Random spread (€/MWh)", - "study.modelization.clusters.variableOMCost": "Variable O&M costs (€/MWh)", - "study.modelization.clusters.costGeneration": "TS Costs", + "study.modelization.clusters.variableOMCost": "Variable O&M cost (€/MWh)", + "study.modelization.clusters.costGeneration": "TS Cost", "study.modelization.clusters.efficiency": "Efficiency (%)", "study.modelization.clusters.timeSeriesGen": "Time-Series generation", "study.modelization.clusters.genTs": "Generate Time-Series", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index a9cb4027e8..4db50746c7 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -126,6 +126,7 @@ "form.field.minLength": "{{0}} caractère(s) minimum", "form.field.minValue": "La valeur 
minimum est {{0}}", "form.field.maxValue": "La valeur maximum est {{0}}", + "form.field.invalidNumber": "Nombre invalide", "form.field.notAllowedValue": "Valeur non autorisée", "form.field.specialChars": "Caractères spéciaux autorisés: {{0}}", "form.field.specialCharsNotAllowed": "Les caractères spéciaux ne sont pas autorisés", @@ -491,8 +492,8 @@ "study.modelization.clusters.startupCost": "Coûts de démarrage (€)", "study.modelization.clusters.marketBidCost": "Offre de marché (€/MWh)", "study.modelization.clusters.spreadCost": "Random Spread (€/MWh)", - "study.modelization.clusters.variableOMCost": "Coûts variables O&M (€/MWh)", - "study.modelization.clusters.costGeneration": "TS Costs", + "study.modelization.clusters.variableOMCost": "Coût variable O&M (€/MWh)", + "study.modelization.clusters.costGeneration": "TS Cost", "study.modelization.clusters.efficiency": "Rendement (%)", "study.modelization.clusters.timeSeriesGen": "Génération des Séries temporelles", "study.modelization.clusters.genTs": "Générer des Séries temporelles", diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx index 902c9091a2..15f67cdb41 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx @@ -16,13 +16,14 @@ import { TS_GENERATION_OPTIONS, TS_LAW_OPTIONS, } from "./utils"; +import { validateNumber } from "../../../../../../../utils/validationUtils"; function Fields() { const [t] = useTranslation(); const { control, watch } = useFormContextPlus(); const { study } = useOutletContext<{ study: StudyMetadata }>(); const studyVersion = Number(study.version); - const isTSCosts = watch("costGeneration") === "useCostTimeseries"; + const isTSCost = watch("costGeneration") === "useCostTimeseries"; //////////////////////////////////////////////////////////////// // JSX @@ -73,10 +74,7 @@ function Fields() { name="unitCount" control={control} rules={{ - min: { - value: 1, - message: t("form.field.minValue", { 0: 1 }), - }, + validate: (v) => validateNumber(v, { min: 1 }), setValueAs: Math.floor, }} /> @@ -85,10 +83,7 @@ function Fields() { name="nominalCapacity" control={control} rules={{ - min: { - value: 0, - message: t("form.field.minValue", { 0: 0 }), - }, + validate: (v) => validateNumber(v, { min: 0 }), }} /> validateNumber(v, { min: 0, max: 100 }), }} /> validateNumber(v, { min: 1, max: 168 }), setValueAs: Math.floor, }} /> @@ -132,14 +113,7 @@ function Fields() { name="minDownTime" control={control} rules={{ - min: { - value: 1, - message: t("form.field.minValue", { 0: 1 }), - }, - max: { - value: 168, - message: t("form.field.maxValue", { 0: 168 }), - }, + validate: (v) => validateNumber(v, { min: 1, max: 168 }), setValueAs: Math.floor, }} /> @@ -160,22 +134,16 @@ function Fields() { name="efficiency" control={control} rules={{ - min: { - value: 0, - message: t("form.field.minValue", { 0: 0 }), - }, + validate: (v) => validateNumber(v, { min: 0 }), }} - disabled={!isTSCosts} + disabled={!isTSCost} /> validateNumber(v, { min: 0 }), }} /> @@ -184,10 +152,7 @@ function Fields() { name="startupCost" control={control} rules={{ - min: { - value: 0, - message: t("form.field.minValue", { 0: 0 }), - }, + validate: (v) => validateNumber(v, { min: 0 }), }} /> validateNumber(v, { min: 0 }), }} /> validateNumber(v, { min: 0 }), }} /> validateNumber(v, { min: 0 }), }} - 
disabled={!isTSCosts} + disabled={!isTSCost} /> validateNumber(v, { min: 0 }), }} /> ), @@ -265,14 +218,7 @@ function Fields() { name="volatilityForced" control={control} rules={{ - min: { - value: 0, - message: t("form.field.minValue", { 0: 0 }), - }, - max: { - value: 1, - message: t("form.field.maxValue", { 0: 1 }), - }, + validate: (v) => validateNumber(v, { min: 0, max: 1 }), }} inputProps={{ step: 0.1 }} /> @@ -281,14 +227,7 @@ function Fields() { name="volatilityPlanned" control={control} rules={{ - min: { - value: 0, - message: t("form.field.minValue", { 0: 0 }), - }, - max: { - value: 1, - message: t("form.field.maxValue", { 0: 1 }), - }, + validate: (v) => validateNumber(v, { min: 0, max: 1 }), }} inputProps={{ step: 0.1 }} /> diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Matrix.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Matrix.tsx index 10801c59a4..93f8a08e7d 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Matrix.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Matrix.tsx @@ -22,6 +22,7 @@ function Matrix({ study, areaId, clusterId }: Props) { const [value, setValue] = React.useState(0); const commonNames = [ + // TODO COMMON_MATIX_COLS "Marginal cost modulation", "Market bid modulation", "Capacity modulation", @@ -29,6 +30,7 @@ function Matrix({ study, areaId, clusterId }: Props) { ]; const tsGenNames = [ + // TODO TS_GEN_MATRIX_COLS "FO Duration", "PO Duration", "FO Rate", diff --git a/webapp/src/utils/validationUtils.ts b/webapp/src/utils/validationUtils.ts index 9af316cbba..5b3f60091d 100644 --- a/webapp/src/utils/validationUtils.ts +++ b/webapp/src/utils/validationUtils.ts @@ -150,6 +150,37 @@ export function validatePassword(password: string): string | true { return true; } +/** + * Validates a number against specified numerical limits. + * + * @param value - The number to validate. + * @param options - Configuration options for validation including min and max values. (Optional) + * @param [options.min=Number.MIN_SAFE_INTEGER] - Minimum allowed value for the number. + * @param [options.max=Number.MAX_SAFE_INTEGER] - Maximum allowed value for the number. + * @returns True if validation is successful, or a localized error message if it fails. 
+ */ +export function validateNumber( + value: number, + options?: ValidationOptions, +): string | true { + if (typeof value !== "number" || isNaN(value) || !isFinite(value)) { + return t("form.field.invalidNumber", { value }); + } + + const { min = Number.MIN_SAFE_INTEGER, max = Number.MAX_SAFE_INTEGER } = + options || {}; + + if (value < min) { + return t("form.field.minValue", { 0: min }); + } + + if (value > max) { + return t("form.field.maxValue", { 0: max }); + } + + return true; +} + //////////////////////////////////////////////////////////////// // Utils //////////////////////////////////////////////////////////////// From 9de4e434e04c72d89fd07e95eb4c04c48f9fd889 Mon Sep 17 00:00:00 2001 From: hatim dinia Date: Thu, 11 Apr 2024 16:58:53 +0200 Subject: [PATCH 133/147] feat(ui-thermal): add v8.7 thermals matrices --- webapp/public/locales/en/main.json | 2 + webapp/public/locales/fr/main.json | 2 + .../Modelization/Areas/Thermal/Matrix.tsx | 136 +++++++++--------- .../Modelization/Areas/Thermal/utils.ts | 19 ++- .../common/EditableMatrix/index.tsx | 2 +- .../components/common/MatrixInput/index.tsx | 2 +- 6 files changed, 89 insertions(+), 74 deletions(-) diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 8f6154f733..144538d16c 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -504,6 +504,8 @@ "study.modelization.clusters.matrix.common": "Common", "study.modelization.clusters.matrix.tsGen": "TS generator", "study.modelization.clusters.matrix.timeSeries": "Time-Series", + "study.modelization.clusters.matrix.fuelCost": "Fuel Cost", + "study.modelization.clusters.matrix.co2Cost": "CO2 Cost", "study.modelization.clusters.backClusterList": "Back to cluster list", "study.modelization.clusters.tsInterpretation": "TS interpretation", "studies.modelization.clusters.question.delete_one": "Are you sure you want to delete this cluster?", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index 4db50746c7..7210751d43 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -504,6 +504,8 @@ "study.modelization.clusters.matrix.common": "Common", "study.modelization.clusters.matrix.tsGen": "TS generator", "study.modelization.clusters.matrix.timeSeries": "Séries temporelles", + "study.modelization.clusters.matrix.fuelCost": "Fuel Cost", + "study.modelization.clusters.matrix.co2Cost": "CO2 Cost", "study.modelization.clusters.backClusterList": "Retour à la liste des clusters", "study.modelization.clusters.tsInterpretation": "TS interpretation", "studies.modelization.clusters.question.delete_one": "Êtes-vous sûr de vouloir supprimer ce cluster ?", diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Matrix.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Matrix.tsx index 93f8a08e7d..4e4652f8a4 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Matrix.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Matrix.tsx @@ -1,5 +1,4 @@ -import * as React from "react"; -import * as R from "ramda"; +import { useState } from "react"; import Tabs from "@mui/material/Tabs"; import Tab from "@mui/material/Tab"; import Box from "@mui/material/Box"; @@ -10,6 +9,7 @@ import { StudyMetadata, } from "../../../../../../../common/types"; import MatrixInput from "../../../../../../common/MatrixInput"; +import { COMMON_MATRIX_COLS, TS_GEN_MATRIX_COLS } 
from "./utils"; interface Props { study: StudyMetadata; @@ -19,93 +19,87 @@ interface Props { function Matrix({ study, areaId, clusterId }: Props) { const [t] = useTranslation(); - const [value, setValue] = React.useState(0); + const [value, setValue] = useState(0); - const commonNames = [ - // TODO COMMON_MATIX_COLS - "Marginal cost modulation", - "Market bid modulation", - "Capacity modulation", - "Min gen modulation", - ]; - - const tsGenNames = [ - // TODO TS_GEN_MATRIX_COLS - "FO Duration", - "PO Duration", - "FO Rate", - "PO Rate", - "NPO Min", - "NPO Max", - ]; + //////////////////////////////////////////////////////////////// + // Event Handlers + //////////////////////////////////////////////////////////////// const handleChange = (event: React.SyntheticEvent, newValue: number) => { setValue(newValue); }; + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + return ( - + + + - {R.cond([ - [ - () => value === 0, - () => ( - - ), - ], - [ - () => value === 1, - () => ( - - ), - ], - [ - R.T, - () => ( - - ), - ], - ])()} + {value === 0 && ( + + )} + {value === 1 && ( + + )} + {value === 2 && ( + + )} + {value === 3 && ( + + )} + {value === 4 && ( + + )} ); diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts index f6da40a698..b5b0526ded 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts @@ -11,6 +11,22 @@ import type { ClusterWithCapacity } from "../common/clustersUtils"; // Constants //////////////////////////////////////////////////////////////// +export const COMMON_MATRIX_COLS = [ + "Marginal cost modulation", + "Market bid modulation", + "Capacity modulation", + "Min gen modulation", +] as const; + +export const TS_GEN_MATRIX_COLS = [ + "FO Duration", + "PO Duration", + "FO Rate", + "PO Rate", + "NPO Min", + "NPO Max", +] as const; + export const THERMAL_GROUPS = [ "Gas", "Hard Coal", @@ -25,8 +41,8 @@ export const THERMAL_GROUPS = [ ] as const; export const THERMAL_POLLUTANTS = [ - // For study versions >= 860 "co2", + // Since v8.6 "so2", "nh3", "nox", @@ -89,6 +105,7 @@ export interface ThermalCluster extends ThermalPollutants { volatilityPlanned: number; lawForced: TimeSeriesLawOption; lawPlanned: TimeSeriesLawOption; + // Since v8.7 costGeneration: CostGeneration; efficiency: number; variableOMCost: number; diff --git a/webapp/src/components/common/EditableMatrix/index.tsx b/webapp/src/components/common/EditableMatrix/index.tsx index 3e17ed5aaa..7916fa2a1f 100644 --- a/webapp/src/components/common/EditableMatrix/index.tsx +++ b/webapp/src/components/common/EditableMatrix/index.tsx @@ -24,7 +24,7 @@ interface PropTypes { matrixTime: boolean; readOnly: boolean; onUpdate?: (change: MatrixEditDTO[], source: string) => void; - columnsNames?: string[]; + columnsNames?: string[] | readonly string[]; rowNames?: string[]; computStats?: MatrixStats; isPercentDisplayEnabled?: boolean; diff --git a/webapp/src/components/common/MatrixInput/index.tsx b/webapp/src/components/common/MatrixInput/index.tsx index fdeb92749f..d5c9a71f2a 100644 --- a/webapp/src/components/common/MatrixInput/index.tsx +++ b/webapp/src/components/common/MatrixInput/index.tsx @@ -29,7 +29,7 @@ const logErr = debug("antares:createimportform:error"); interface Props { 
study: StudyMetadata; url: string; - columnsNames?: string[]; + columnsNames?: string[] | readonly string[]; rowNames?: string[]; title?: string; computStats: MatrixStats; From 85598281f351507d56ae127450a562dba7188c88 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE <43534797+laurent-laporte-pro@users.noreply.github.com> Date: Mon, 6 May 2024 20:45:36 +0200 Subject: [PATCH 134/147] feat(bc): add endpoint for multiple terms edition (#2020) --- antarest/core/exceptions.py | 62 ++++++-- .../business/binding_constraint_management.py | 143 +++++++++--------- antarest/study/web/study_data_blueprint.py | 79 +++++++++- .../test_binding_constraints.py | 116 +++++++++++--- 4 files changed, 292 insertions(+), 108 deletions(-) diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py index 87804de393..0ca8af4f7d 100644 --- a/antarest/core/exceptions.py +++ b/antarest/core/exceptions.py @@ -376,11 +376,6 @@ def __init__(self, message: str) -> None: super().__init__(HTTPStatus.NOT_FOUND, message) -class ConstraintAlreadyExistError(HTTPException): - def __init__(self, message: str) -> None: - super().__init__(HTTPStatus.NOT_FOUND, message) - - class DuplicateConstraintName(HTTPException): def __init__(self, message: str) -> None: super().__init__(HTTPStatus.CONFLICT, message) @@ -406,14 +401,61 @@ def __init__(self, message: str) -> None: super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message) -class MissingDataError(HTTPException): - def __init__(self, message: str) -> None: +class ConstraintTermNotFound(HTTPException): + """ + Exception raised when a constraint term is not found. + """ + + def __init__(self, binding_constraint_id: str, *ids: str) -> None: + count = len(ids) + id_enum = ", ".join(f"'{term}'" for term in ids) + message = { + 0: f"Constraint terms not found in BC '{binding_constraint_id}'", + 1: f"Constraint term {id_enum} not found in BC '{binding_constraint_id}'", + 2: f"Constraint terms {id_enum} not found in BC '{binding_constraint_id}'", + }[min(count, 2)] super().__init__(HTTPStatus.NOT_FOUND, message) + def __str__(self) -> str: + """Return a string representation of the exception.""" + return self.detail -class ConstraintIdNotFoundError(HTTPException): - def __init__(self, message: str) -> None: - super().__init__(HTTPStatus.NOT_FOUND, message) + +class DuplicateConstraintTerm(HTTPException): + """ + Exception raised when an attempt is made to create a constraint term which already exists. + """ + + def __init__(self, binding_constraint_id: str, *ids: str) -> None: + count = len(ids) + id_enum = ", ".join(f"'{term}'" for term in ids) + message = { + 0: f"Constraint terms already exist in BC '{binding_constraint_id}'", + 1: f"Constraint term {id_enum} already exists in BC '{binding_constraint_id}'", + 2: f"Constraint terms {id_enum} already exist in BC '{binding_constraint_id}'", + }[min(count, 2)] + super().__init__(HTTPStatus.CONFLICT, message) + + def __str__(self) -> str: + """Return a string representation of the exception.""" + return self.detail + + +class InvalidConstraintTerm(HTTPException): + """ + Exception raised when a constraint term is not correctly specified (no term data). 
+ """ + + def __init__(self, binding_constraint_id: str, term_json: str) -> None: + message = ( + f"Invalid constraint term for binding constraint '{binding_constraint_id}': {term_json}," + f" term 'data' is missing or empty" + ) + super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message) + + def __str__(self) -> str: + """Return a string representation of the exception.""" + return self.detail class LayerNotFound(HTTPException): diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py index 9c29c5925d..28881ef874 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -10,14 +10,13 @@ from antarest.core.exceptions import ( BindingConstraintNotFound, - ConstraintAlreadyExistError, - ConstraintIdNotFoundError, + ConstraintTermNotFound, DuplicateConstraintName, + DuplicateConstraintTerm, InvalidConstraintName, + InvalidConstraintTerm, InvalidFieldForVersionError, MatrixWidthMismatchError, - MissingDataError, - NoConstraintError, WrongMatrixHeightError, ) from antarest.core.model import JSON @@ -805,90 +804,92 @@ def remove_binding_constraint(self, study: Study, binding_constraint_id: str) -> command = RemoveBindingConstraint(id=bc.id, command_context=command_context) execute_or_add_commands(study, file_study, [command], self.storage_service) - def update_constraint_term( - self, - study: Study, - binding_constraint_id: str, - term: ConstraintTerm, + def _update_constraint_with_terms( + self, study: Study, bc: ConstraintOutput, terms: t.Mapping[str, ConstraintTerm] ) -> None: - file_study = self.storage_service.get_storage(study).get_raw(study) - constraint = self.get_binding_constraint(study, binding_constraint_id) - constraint_terms = constraint.terms # existing constraint terms - if not constraint_terms: - raise NoConstraintError(study.id) - - term_id = term.id if isinstance(term, ConstraintTerm) else term - if term_id is None: - raise ConstraintIdNotFoundError(study.id) - - term_id_index = find_constraint_term_id(constraint_terms, term_id) - if term_id_index < 0: - raise ConstraintIdNotFoundError(study.id) - - if isinstance(term, ConstraintTerm): - updated_term_id = term.data.generate_id() if term.data else term_id - current_constraint = constraint_terms[term_id_index] - - constraint_terms[term_id_index] = ConstraintTerm( - id=updated_term_id, - weight=term.weight or current_constraint.weight, - offset=term.offset, - data=term.data or current_constraint.data, - ) - else: - del constraint_terms[term_id_index] - - coeffs = {term.id: [term.weight, term.offset] if term.offset else [term.weight] for term in constraint_terms} - + coeffs = { + term_id: [term.weight, term.offset] if term.offset else [term.weight] for term_id, term in terms.items() + } command = UpdateBindingConstraint( - id=constraint.id, + id=bc.id, coeffs=coeffs, command_context=self.storage_service.variant_study_service.command_factory.command_context, ) + file_study = self.storage_service.get_storage(study).get_raw(study) execute_or_add_commands(study, file_study, [command], self.storage_service) - def create_constraint_term( + def update_constraint_terms( self, study: Study, binding_constraint_id: str, - constraint_term: ConstraintTerm, + constraint_terms: t.Sequence[ConstraintTerm], + update_mode: str = "replace", ) -> None: - file_study = self.storage_service.get_storage(study).get_raw(study) - constraint = self.get_binding_constraint(study, binding_constraint_id) - - if 
constraint_term.data is None: - raise MissingDataError("Add new constraint term : data is missing") + """ + Update or add the specified constraint terms. - constraint_id = constraint_term.data.generate_id() - constraint_terms = constraint.terms or [] - if find_constraint_term_id(constraint_terms, constraint_id) >= 0: - raise ConstraintAlreadyExistError(study.id) + Args: + study: The study from which to update the binding constraint. + binding_constraint_id: The ID of the binding constraint to update. + constraint_terms: The constraint terms to update. + update_mode: The update mode, either "replace" or "add". + """ + if update_mode == "add": + for term in constraint_terms: + if term.data is None: + raise InvalidConstraintTerm(binding_constraint_id, term.json()) - constraint_terms.append( - ConstraintTerm( - id=constraint_id, - weight=constraint_term.weight if constraint_term.weight is not None else 0.0, - offset=constraint_term.offset, - data=constraint_term.data, - ) - ) + constraint = self.get_binding_constraint(study, binding_constraint_id) + existing_terms = collections.OrderedDict((term.generate_id(), term) for term in constraint.terms) + updated_terms = collections.OrderedDict((term.generate_id(), term) for term in constraint_terms) + + if update_mode == "replace": + missing_terms = set(updated_terms) - set(existing_terms) + if missing_terms: + raise ConstraintTermNotFound(binding_constraint_id, *missing_terms) + elif update_mode == "add": + duplicate_terms = set(updated_terms) & set(existing_terms) + if duplicate_terms: + raise DuplicateConstraintTerm(binding_constraint_id, *duplicate_terms) + else: # pragma: no cover + raise NotImplementedError(f"Unsupported update mode: {update_mode}") + + existing_terms.update(updated_terms) + self._update_constraint_with_terms(study, constraint, existing_terms) + + def create_constraint_terms( + self, study: Study, binding_constraint_id: str, constraint_terms: t.Sequence[ConstraintTerm] + ) -> None: + """ + Adds new constraint terms to an existing binding constraint. - coeffs = {term.id: [term.weight] + [term.offset] if term.offset else [term.weight] for term in constraint_terms} - command = UpdateBindingConstraint( - id=constraint.id, - coeffs=coeffs, - command_context=self.storage_service.variant_study_service.command_factory.command_context, - ) - execute_or_add_commands(study, file_study, [command], self.storage_service) + Args: + study: The study from which to update the binding constraint. + binding_constraint_id: The ID of the binding constraint to update. + constraint_terms: The constraint terms to add. + """ + return self.update_constraint_terms(study, binding_constraint_id, constraint_terms, update_mode="add") - # FIXME create a dedicated delete service def remove_constraint_term( self, study: Study, binding_constraint_id: str, term_id: str, ) -> None: - return self.update_constraint_term(study, binding_constraint_id, term_id) # type: ignore + """ + Remove a constraint term from an existing binding constraint. + + Args: + study: The study from which to update the binding constraint. + binding_constraint_id: The ID of the binding constraint to update. + term_id: The ID of the term to remove. 
+ """ + constraint = self.get_binding_constraint(study, binding_constraint_id) + existing_terms = collections.OrderedDict((term.generate_id(), term) for term in constraint.terms) + removed_term = existing_terms.pop(term_id, None) + if removed_term is None: + raise ConstraintTermNotFound(binding_constraint_id, term_id) + self._update_constraint_with_terms(study, constraint, existing_terms) @staticmethod def get_table_schema() -> JSON: @@ -918,14 +919,6 @@ def _replace_matrices_according_to_frequency_and_version( return args -def find_constraint_term_id(constraints_term: t.Sequence[ConstraintTerm], constraint_term_id: str) -> int: - try: - index = [elm.id for elm in constraints_term].index(constraint_term_id) - return index - except ValueError: - return -1 - - def check_attributes_coherence(data: t.Union[ConstraintCreation, ConstraintInput], study_version: int) -> None: if study_version < 870: if data.group: diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index ecdd5ff191..467aa49805 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -59,7 +59,6 @@ from antarest.study.business.timeseries_config_management import TSFormFields from antarest.study.model import PatchArea, PatchCluster from antarest.study.service import StudyService -from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( BindingConstraintFrequency, BindingConstraintOperator, @@ -1204,14 +1203,49 @@ def add_constraint_term( binding_constraint_id: str, term: ConstraintTerm, current_user: JWTUser = Depends(auth.get_current_user), - ) -> t.Any: + ) -> None: + """ + Append a new term to a given binding constraint + + Args: + - `uuid`: The UUID of the study. + - `binding_constraint_id`: The binding constraint ID. + - `term`: The term to create. + """ logger.info( f"Add constraint term {term.id} to {binding_constraint_id} for study {uuid}", extra={"user": current_user.id}, ) params = RequestParameters(user=current_user) study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, params) - return study_service.binding_constraint_manager.create_constraint_term(study, binding_constraint_id, term) + return study_service.binding_constraint_manager.create_constraint_terms(study, binding_constraint_id, [term]) + + @bp.post( + "/studies/{uuid}/bindingconstraints/{binding_constraint_id}/terms", + tags=[APITag.study_data], + summary="Create terms for a given binding constraint", + ) + def add_constraint_terms( + uuid: str, + binding_constraint_id: str, + terms: t.Sequence[ConstraintTerm], + current_user: JWTUser = Depends(auth.get_current_user), + ) -> None: + """ + Append new terms to a given binding constraint + + Args: + - `uuid`: The UUID of the study. + - `binding_constraint_id`: The binding constraint ID. + - `terms`: The list of terms to create. 
+ """ + logger.info( + f"Adding constraint terms to {binding_constraint_id} for study {uuid}", + extra={"user": current_user.id}, + ) + params = RequestParameters(user=current_user) + study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, params) + return study_service.binding_constraint_manager.create_constraint_terms(study, binding_constraint_id, terms) @bp.put( "/studies/{uuid}/bindingconstraints/{binding_constraint_id}/term", @@ -1223,14 +1257,49 @@ def update_constraint_term( binding_constraint_id: str, term: ConstraintTerm, current_user: JWTUser = Depends(auth.get_current_user), - ) -> t.Any: + ) -> None: + """ + Update a term for a given binding constraint + + Args: + - `uuid`: The UUID of the study. + - `binding_constraint_id`: The binding constraint ID. + - `term`: The term to update. + """ logger.info( f"Update constraint term {term.id} from {binding_constraint_id} for study {uuid}", extra={"user": current_user.id}, ) params = RequestParameters(user=current_user) study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, params) - return study_service.binding_constraint_manager.update_constraint_term(study, binding_constraint_id, term) + return study_service.binding_constraint_manager.update_constraint_terms(study, binding_constraint_id, [term]) + + @bp.put( + "/studies/{uuid}/bindingconstraints/{binding_constraint_id}/terms", + tags=[APITag.study_data], + summary="Update terms for a given binding constraint", + ) + def update_constraint_terms( + uuid: str, + binding_constraint_id: str, + terms: t.Sequence[ConstraintTerm], + current_user: JWTUser = Depends(auth.get_current_user), + ) -> None: + """ + Update several terms for a given binding constraint + + Args: + - `uuid`: The UUID of the study. + - `binding_constraint_id`: The binding constraint ID. + - `terms`: The list of terms to update. 
+ """ + logger.info( + f"Updating constraint terms from {binding_constraint_id} for study {uuid}", + extra={"user": current_user.id}, + ) + params = RequestParameters(user=current_user) + study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, params) + return study_service.binding_constraint_manager.update_constraint_terms(study, binding_constraint_id, terms) @bp.delete( "/studies/{uuid}/bindingconstraints/{binding_constraint_id}/term/{term_id}", diff --git a/tests/integration/study_data_blueprint/test_binding_constraints.py b/tests/integration/study_data_blueprint/test_binding_constraints.py index 29394c3b2f..fff973ae20 100644 --- a/tests/integration/study_data_blueprint/test_binding_constraints.py +++ b/tests/integration/study_data_blueprint/test_binding_constraints.py @@ -375,17 +375,15 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st # Update constraint cluster term with invalid id res = client.put( f"/v1/studies/{study_id}/bindingconstraints/{bc_id}/term", - json={ - "id": f"{area1_id}.!!Invalid#cluster%%", - "weight": 4, - }, + json={"id": f"{area1_id}.!!invalid#cluster%%", "weight": 4}, headers=user_headers, ) assert res.status_code == 404, res.json() - assert res.json() == { - "description": f"{study_id}", - "exception": "ConstraintIdNotFoundError", - } + exception = res.json()["exception"] + description = res.json()["description"] + assert exception == "ConstraintTermNotFound" + assert bc_id in description + assert f"{area1_id}.!!invalid#cluster%%" in description # Update constraint cluster term with empty data res = client.put( @@ -683,6 +681,19 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud ) assert res.status_code == 200, res.json() + # Create a cluster in area1 + res = client.post( + f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal", + headers=admin_headers, + json={ + "name": "Cluster 1", + "group": "Nuclear", + }, + ) + assert res.status_code == 200, res.json() + cluster_id = res.json()["id"] + assert cluster_id == "Cluster 1" + # ============================= # CREATION # ============================= @@ -744,34 +755,103 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud # CONSTRAINT TERM MANAGEMENT # ============================= - # Add binding constraint link term + # Add binding constraint terms res = client.post( - f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/term", - json={ - "weight": 1, - "offset": 2.5, + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/terms", + json=[ + {"weight": 1, "offset": 2, "data": {"area1": area1_id, "area2": area2_id}}, + {"weight": 1, "offset": 2, "data": {"area": area1_id, "cluster": cluster_id}}, + ], + headers=admin_headers, + ) + assert res.status_code == 200, res.json() + + # Attempt to add a term with missing data + res = client.post( + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/terms", + json=[{"weight": 1, "offset": 2}], + headers=admin_headers, + ) + assert res.status_code == 422, res.json() + exception = res.json()["exception"] + description = res.json()["description"] + assert exception == "InvalidConstraintTerm" + assert bc_id_w_group in description, "Error message should contain the binding constraint ID" + assert "term 'data' is missing" in description, "Error message should indicate the missing field" + + # Attempt to add a duplicate term + res = client.post( + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/terms", + json=[{"weight": 99, 
"offset": 0, "data": {"area1": area1_id, "area2": area2_id}}], + headers=admin_headers, + ) + assert res.status_code == 409, res.json() + exception = res.json()["exception"] + description = res.json()["description"] + assert exception == "DuplicateConstraintTerm" + assert bc_id_w_group in description, "Error message should contain the binding constraint ID" + assert f"{area1_id}%{area2_id}" in description, "Error message should contain the duplicate term ID" + + # Get binding constraints list to check added terms + res = client.get( + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}", + headers=admin_headers, + ) + assert res.status_code == 200, res.json() + binding_constraint = res.json() + constraint_terms = binding_constraint["terms"] + expected = [ + { "data": {"area1": area1_id, "area2": area2_id}, + "id": f"{area1_id}%{area2_id}", + "offset": 2, + "weight": 1.0, + }, + { + "data": {"area": area1_id, "cluster": cluster_id.lower()}, + "id": f"{area1_id}.{cluster_id.lower()}", + "offset": 2, + "weight": 1.0, }, + ] + assert constraint_terms == expected + + # Update binding constraint terms + res = client.put( + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/terms", + json=[ + {"id": f"{area1_id}%{area2_id}", "weight": 4.4, "offset": 1}, + { + "id": f"{area1_id}.{cluster_id}", + "weight": 5.1, + "data": {"area": area1_id, "cluster": cluster_id}, + }, + ], headers=admin_headers, ) assert res.status_code == 200, res.json() - # Get binding constraints list to check added term + # Asserts terms were updated res = client.get( f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}", headers=admin_headers, ) assert res.status_code == 200, res.json() binding_constraint = res.json() - assert binding_constraint["group"] == "specific_grp" # asserts the group wasn't altered constraint_terms = binding_constraint["terms"] expected = [ { "data": {"area1": area1_id, "area2": area2_id}, "id": f"{area1_id}%{area2_id}", - "offset": 2, # asserts the offset has been rounded - "weight": 1.0, - } + "offset": 1, + "weight": 4.4, + }, + { + "data": {"area": area1_id, "cluster": cluster_id.lower()}, + "id": f"{area1_id}.{cluster_id.lower()}", + "offset": None, + "weight": 5.1, + }, ] assert constraint_terms == expected From ed734d884fdb10ce624d96bb92e7acbb917bc773 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE <43534797+laurent-laporte-pro@users.noreply.github.com> Date: Tue, 7 May 2024 18:14:39 +0200 Subject: [PATCH 135/147] fix(tablemode): hide `adequacy_patch_mode` column from table-mode before v8.3 (#2022) Merge pull request #2022 from AntaresSimulatorTeam/fix/ANT-1675-table-mode --- antarest/study/business/area_management.py | 5 +- .../study/business/table_mode_management.py | 4 +- .../study_data_blueprint/test_table_mode.py | 836 +++++++++--------- 3 files changed, 433 insertions(+), 412 deletions(-) diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py index db04120884..8f0758d9dc 100644 --- a/antarest/study/business/area_management.py +++ b/antarest/study/business/area_management.py @@ -232,8 +232,11 @@ def _to_optimization(self) -> OptimizationProperties: nodal_optimization=nodal_optimization_section, ) - def _to_adequacy_patch(self) -> AdequacyPathProperties: + def _to_adequacy_patch(self) -> t.Optional[AdequacyPathProperties]: obj = {name: getattr(self, name) for name in AdequacyPathProperties.AdequacyPathSection.__fields__} + # If all fields are `None`, the object is empty. 
+ if all(value is None for value in obj.values()): + return None adequacy_path_section = AdequacyPathProperties.AdequacyPathSection(**obj) return AdequacyPathProperties(adequacy_patch=adequacy_path_section) diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index cbbf5358cc..65687af9c9 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -174,10 +174,10 @@ def update_table_data( The updated properties of the objects including the old ones. """ if table_type == TableModeType.AREA: - # Use AreaOutput to update properties of areas + # Use AreaOutput to update properties of areas, which may include `None` values area_props_by_ids = {key: AreaOutput(**values) for key, values in data.items()} areas_map = self._area_manager.update_areas_props(study, area_props_by_ids) - data = {area_id: area.dict(by_alias=True) for area_id, area in areas_map.items()} + data = {area_id: area.dict(by_alias=True, exclude_none=True) for area_id, area in areas_map.items()} return data elif table_type == TableModeType.LINK: links_map = {tuple(key.split(" / ")): LinkOutput(**values) for key, values in data.items()} diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index 15536b8426..45ca2cc961 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -1,9 +1,14 @@ +import typing as t + import pytest from starlette.testclient import TestClient from antarest.core.tasks.model import TaskStatus from tests.integration.utils import wait_task_completion +# noinspection SpellCheckingInspection +POLLUTANTS_860 = ("nh3", "nmvoc", "nox", "op1", "op2", "op3", "op4", "op5", "pm10", "pm25", "pm5", "so2") + # noinspection SpellCheckingInspection @pytest.mark.unit_test @@ -15,11 +20,9 @@ class TestTableMode: which contains the following areas: ["de", "es", "fr", "it"]. """ + @pytest.mark.parametrize("study_version", [0, 810, 830, 860, 870, 880]) def test_lifecycle__nominal( - self, - client: TestClient, - user_access_token: str, - study_id: str, + self, client: TestClient, user_access_token: str, study_id: str, study_version: int ) -> None: user_headers = {"Authorization": f"Bearer {user_access_token}"} @@ -28,30 +31,18 @@ def test_lifecycle__nominal( # or in version 8.6 for short-term storage and that the renewable clusters are enabled # in the study configuration. - # Upgrade the study to version 8.6 - res = client.put( - f"/v1/studies/{study_id}/upgrade", - headers={"Authorization": f"Bearer {user_access_token}"}, - params={"target_version": 860}, - ) - assert res.status_code == 200, res.json() + # Upgrade the study to the desired version + if study_version: + res = client.put( + f"/v1/studies/{study_id}/upgrade", + headers={"Authorization": f"Bearer {user_access_token}"}, + params={"target_version": study_version}, + ) + assert res.status_code == 200, res.json() - task_id = res.json() - task = wait_task_completion(client, user_access_token, task_id) - assert task.status == TaskStatus.COMPLETED, task - - # Parameter 'renewable-generation-modelling' must be set to 'clusters' instead of 'aggregated'. 
- # The `enr_modelling` value must be set to "clusters" instead of "aggregated" - args = { - "target": "settings/generaldata/other preferences", - "data": {"renewable-generation-modelling": "clusters"}, - } - res = client.post( - f"/v1/studies/{study_id}/commands", - headers={"Authorization": f"Bearer {user_access_token}"}, - json=[{"action": "update_config", "args": args}], - ) - assert res.status_code == 200, res.json() + task_id = res.json() + task = wait_task_completion(client, user_access_token, task_id) + assert task.status == TaskStatus.COMPLETED, task # Table Mode - Area # ================= @@ -79,26 +70,29 @@ def test_lifecycle__nominal( "adequacyPatchMode", } + _de_values = { + "averageUnsuppliedEnergyCost": 3456, + "dispatchableHydroPower": False, + "filterSynthesis": "daily, monthly", # not changed + "filterYearByYear": "annual, weekly", + } + _es_values = {"spreadSpilledEnergyCost": None} # not changed + + if study_version >= 830: + _es_values["adequacyPatchMode"] = "inside" + res = client.put( f"/v1/studies/{study_id}/table-mode/areas", headers=user_headers, json={ - "de": { - "averageUnsuppliedEnergyCost": 3456, - "dispatchableHydroPower": False, - "filterSynthesis": "daily, monthly", # not changed - "filterYearByYear": "annual, weekly", - }, - "es": { - "adequacyPatchMode": "inside", - "spreadSpilledEnergyCost": None, # not changed - }, + "de": _de_values, + "es": _es_values, }, ) assert res.status_code == 200, res.json() + expected_areas: t.Dict[str, t.Dict[str, t.Any]] expected_areas = { "de": { - "adequacyPatchMode": "outside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3456, "dispatchableHydroPower": False, @@ -110,7 +104,6 @@ def test_lifecycle__nominal( "spreadUnsuppliedEnergyCost": 0, }, "es": { - "adequacyPatchMode": "inside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3000, "dispatchableHydroPower": True, @@ -122,7 +115,6 @@ def test_lifecycle__nominal( "spreadUnsuppliedEnergyCost": 0, }, "fr": { - "adequacyPatchMode": "outside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3000, "dispatchableHydroPower": True, @@ -134,7 +126,6 @@ def test_lifecycle__nominal( "spreadUnsuppliedEnergyCost": 0, }, "it": { - "adequacyPatchMode": "outside", "averageSpilledEnergyCost": 0, "averageUnsuppliedEnergyCost": 3000, "dispatchableHydroPower": True, @@ -146,6 +137,13 @@ def test_lifecycle__nominal( "spreadUnsuppliedEnergyCost": 0, }, } + + if study_version >= 830: + expected_areas["de"]["adequacyPatchMode"] = "outside" + expected_areas["es"]["adequacyPatchMode"] = "inside" + expected_areas["fr"]["adequacyPatchMode"] = "outside" + expected_areas["it"]["adequacyPatchMode"] = "outside" + actual = res.json() assert actual == expected_areas @@ -319,22 +317,19 @@ def test_lifecycle__nominal( "variableOMCost", } + _solar_values = {"group": "Other 2", "nominalCapacity": 500000, "unitCount": 17} + _wind_on_values = {"group": "Nuclear", "nominalCapacity": 314159, "unitCount": 15, "co2": 123} + if study_version >= 860: + _solar_values["so2"] = 8.25 + if study_version >= 870: + _solar_values.update({"costGeneration": "useCostTimeseries", "efficiency": 87, "variableOMCost": -12.5}) + res = client.put( f"/v1/studies/{study_id}/table-mode/thermals", headers=user_headers, json={ - "de / 01_solar": { - "group": "Other 2", - "nominalCapacity": 500000, - "so2": 8.25, - "unitCount": 17, - }, - "de / 02_wind_on": { - "group": "Nuclear", - "nominalCapacity": 314159, - "co2": 123, - "unitCount": 15, - }, + "de / 01_solar": _solar_values, + "de / 
02_wind_on": _wind_on_values, }, ) assert res.status_code == 200, res.json() @@ -357,19 +352,7 @@ def test_lifecycle__nominal( "minStablePower": 0, "minUpTime": 1, "mustRun": False, - "nh3": 0, - "nmvoc": 0, "nominalCapacity": 500000, - "nox": 0, - "op1": 0, - "op2": 0, - "op3": 0, - "op4": 0, - "op5": 0, - "pm10": 0, - "pm25": 0, - "pm5": 0, - "so2": 8.25, "spinning": 0, "spreadCost": 0, "startupCost": 0, @@ -396,19 +379,7 @@ def test_lifecycle__nominal( "minStablePower": 0, "minUpTime": 1, "mustRun": False, - "nh3": 0, - "nmvoc": 0, "nominalCapacity": 314159, - "nox": 0, - "op1": 0, - "op2": 0, - "op3": 0, - "op4": 0, - "op5": 0, - "pm10": 0, - "pm25": 0, - "pm5": 0, - "so2": 0, "spinning": 0, "spreadCost": 0, "startupCost": 0, @@ -418,6 +389,22 @@ def test_lifecycle__nominal( "volatilityPlanned": 0, }, } + + if study_version >= 860: + _values = dict.fromkeys(POLLUTANTS_860, 0) + expected_thermals["de / 02_wind_on"].update(_values) + expected_thermals["de / 01_solar"].update(_values, **{"so2": 8.25}) + else: + _values = dict.fromkeys(POLLUTANTS_860) + expected_thermals["de / 02_wind_on"].update(_values) + expected_thermals["de / 01_solar"].update(_values) + + if study_version >= 870: + _values = {"costGeneration": "SetManually", "efficiency": 100, "variableOMCost": 0} + expected_thermals["de / 02_wind_on"].update(_values) + _values = {"costGeneration": "useCostTimeseries", "efficiency": 87, "variableOMCost": -12.5} + expected_thermals["de / 01_solar"].update(_values) + assert res.json()["de / 01_solar"] == expected_thermals["de / 01_solar"] assert res.json()["de / 02_wind_on"] == expected_thermals["de / 02_wind_on"] @@ -427,340 +414,376 @@ def test_lifecycle__nominal( params={"columns": ",".join(["group", "unitCount", "nominalCapacity", "so2"])}, ) assert res.status_code == 200, res.json() + expected: t.Dict[str, t.Dict[str, t.Any]] expected = { - "de / 01_solar": {"group": "Other 2", "nominalCapacity": 500000, "so2": 8.25, "unitCount": 17}, - "de / 02_wind_on": {"group": "Nuclear", "nominalCapacity": 314159, "so2": 0, "unitCount": 15}, - "de / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "de / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "de / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "de / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "de / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "de / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "de / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "es / 09_hydro_pump": {"group": "Other 
1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "fr / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, - "it / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "de / 01_solar": {"group": "Other 2", "nominalCapacity": 500000, "unitCount": 17}, + "de / 02_wind_on": {"group": "Nuclear", "nominalCapacity": 314159, "unitCount": 15}, + "de / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "de / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "de / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "de / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "de / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "de / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "de / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "es / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 03_wind_off": {"group": "Other 1", 
"nominalCapacity": 1000000, "unitCount": 1}, + "fr / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "fr / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, + "it / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "unitCount": 1}, } + if study_version >= 860: + for key in expected: + expected[key]["so2"] = 0 + expected["de / 01_solar"]["so2"] = 8.25 + actual = res.json() assert actual == expected # Table Mode - Renewable Clusters # =============================== - # Prepare data for renewable clusters tests - generators_by_country = { - "fr": { - "La Rochelle": { - "name": "La Rochelle", - "group": "solar pv", - "nominalCapacity": 2.1, - "unitCount": 1, - "tsInterpretation": "production-factor", - }, - "Oleron": { - "name": "Oleron", - "group": "wind offshore", - "nominalCapacity": 15, - "unitCount": 70, - "tsInterpretation": "production-factor", + # only concerns studies after v8.1 + if study_version >= 810: + # Parameter 'renewable-generation-modelling' must be set to 'clusters' instead of 'aggregated'. 
+ # The `enr_modelling` value must be set to "clusters" instead of "aggregated" + args = { + "target": "settings/generaldata/other preferences", + "data": {"renewable-generation-modelling": "clusters"}, + } + res = client.post( + f"/v1/studies/{study_id}/commands", + headers={"Authorization": f"Bearer {user_access_token}"}, + json=[{"action": "update_config", "args": args}], + ) + assert res.status_code == 200, res.json() + + # Prepare data for renewable clusters tests + generators_by_country = { + "fr": { + "La Rochelle": { + "name": "La Rochelle", + "group": "solar pv", + "nominalCapacity": 2.1, + "unitCount": 1, + "tsInterpretation": "production-factor", + }, + "Oleron": { + "name": "Oleron", + "group": "wind offshore", + "nominalCapacity": 15, + "unitCount": 70, + "tsInterpretation": "production-factor", + }, + "Dieppe": { + "name": "Dieppe", + "group": "wind offshore", + "nominalCapacity": 8, + "unitCount": 62, + "tsInterpretation": "power-generation", + }, }, - "Dieppe": { - "name": "Dieppe", - "group": "wind offshore", - "nominalCapacity": 8, - "unitCount": 62, - "tsInterpretation": "power-generation", + "it": { + "Sicile": { + "name": "Sicile", + "group": "solar pv", + "nominalCapacity": 1.8, + "unitCount": 1, + "tsInterpretation": "production-factor", + }, + "Sardaigne": { + "name": "Sardaigne", + "group": "wind offshore", + "nominalCapacity": 12, + "unitCount": 86, + "tsInterpretation": "power-generation", + }, + "Pouilles": { + "name": "Pouilles", + "enabled": False, + "group": "wind offshore", + "nominalCapacity": 11, + "unitCount": 40, + "tsInterpretation": "power-generation", + }, }, - }, - "it": { - "Sicile": { - "name": "Sicile", - "group": "solar pv", - "nominalCapacity": 1.8, - "unitCount": 1, - "tsInterpretation": "production-factor", - }, - "Sardaigne": { - "name": "Sardaigne", - "group": "wind offshore", - "nominalCapacity": 12, - "unitCount": 86, - "tsInterpretation": "power-generation", + } + + for area_id, generators in generators_by_country.items(): + for generator_id, generator in generators.items(): + res = client.post( + f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + headers=user_headers, + json=generator, + ) + res.raise_for_status() + + # Get the schema of the renewables table + res = client.get( + "/v1/table-schema/renewables", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + # read-only fields + "id", + "name", + # Renewables fields + "group", + "tsInterpretation", + "enabled", + "unitCount", + "nominalCapacity", + } + + # Update some generators using the table mode + res = client.put( + f"/v1/studies/{study_id}/table-mode/renewables", + headers=user_headers, + json={ + "fr / Dieppe": {"enabled": False}, + "fr / La Rochelle": {"enabled": True, "nominalCapacity": 3.1, "unitCount": 2}, + "it / Pouilles": {"group": "Wind Onshore"}, }, - "Pouilles": { - "name": "Pouilles", - "enabled": False, - "group": "wind offshore", - "nominalCapacity": 11, - "unitCount": 40, - "tsInterpretation": "power-generation", - }, - }, - } - - for area_id, generators in generators_by_country.items(): - for generator_id, generator in generators.items(): - res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", - headers=user_headers, - json=generator, - ) - res.raise_for_status() - - # Get the schema of the renewables table - res = client.get( - "/v1/table-schema/renewables", - headers=user_headers, - ) - assert res.status_code == 200, res.json() - actual 
= res.json() - assert set(actual["properties"]) == { - # read-only fields - "id", - "name", - # Renewables fields - "group", - "tsInterpretation", - "enabled", - "unitCount", - "nominalCapacity", - } - - # Update some generators using the table mode - res = client.put( - f"/v1/studies/{study_id}/table-mode/renewables", - headers=user_headers, - json={ - "fr / Dieppe": {"enabled": False}, - "fr / La Rochelle": {"enabled": True, "nominalCapacity": 3.1, "unitCount": 2}, - "it / Pouilles": {"group": "Wind Onshore"}, - }, - ) - assert res.status_code == 200, res.json() - - res = client.get( - f"/v1/studies/{study_id}/table-mode/renewables", - headers=user_headers, - params={"columns": ",".join(["group", "enabled", "unitCount", "nominalCapacity"])}, - ) - assert res.status_code == 200, res.json() - expected = { - "fr / Dieppe": {"enabled": False, "group": "Wind Offshore", "nominalCapacity": 8, "unitCount": 62}, - "fr / La Rochelle": {"enabled": True, "group": "Solar PV", "nominalCapacity": 3.1, "unitCount": 2}, - "fr / Oleron": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 15, "unitCount": 70}, - "it / Pouilles": {"enabled": False, "group": "Wind Onshore", "nominalCapacity": 11, "unitCount": 40}, - "it / Sardaigne": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 12, "unitCount": 86}, - "it / Sicile": {"enabled": True, "group": "Solar PV", "nominalCapacity": 1.8, "unitCount": 1}, - } - actual = res.json() - assert actual == expected + ) + assert res.status_code == 200, res.json() + + res = client.get( + f"/v1/studies/{study_id}/table-mode/renewables", + headers=user_headers, + params={"columns": ",".join(["group", "enabled", "unitCount", "nominalCapacity"])}, + ) + assert res.status_code == 200, res.json() + expected = { + "fr / Dieppe": {"enabled": False, "group": "Wind Offshore", "nominalCapacity": 8, "unitCount": 62}, + "fr / La Rochelle": {"enabled": True, "group": "Solar PV", "nominalCapacity": 3.1, "unitCount": 2}, + "fr / Oleron": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 15, "unitCount": 70}, + "it / Pouilles": {"enabled": False, "group": "Wind Onshore", "nominalCapacity": 11, "unitCount": 40}, + "it / Sardaigne": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 12, "unitCount": 86}, + "it / Sicile": {"enabled": True, "group": "Solar PV", "nominalCapacity": 1.8, "unitCount": 1}, + } + actual = res.json() + assert actual == expected # Table Mode - Short Term Storage # =============================== - # Get the schema of the short-term storages table - res = client.get( - "/v1/table-schema/st-storages", - headers=user_headers, - ) - assert res.status_code == 200, res.json() - actual = res.json() - assert set(actual["properties"]) == { - # read-only fields - "id", - "name", - # Short-term storage fields - "enabled", # since v8.8 - "group", - "injectionNominalCapacity", - "withdrawalNominalCapacity", - "reservoirCapacity", - "efficiency", - "initialLevel", - "initialLevelOptim", - } - - # Prepare data for short-term storage tests - storage_by_country = { - "fr": { - "siemens": { - "name": "Siemens", - "group": "battery", - "injectionNominalCapacity": 1500, - "withdrawalNominalCapacity": 1500, - "reservoirCapacity": 1500, + # only concerns studies after v8.6 + if study_version >= 860: + # Get the schema of the short-term storages table + res = client.get( + "/v1/table-schema/st-storages", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + # 
read-only fields + "id", + "name", + # Short-term storage fields + "enabled", # since v8.8 + "group", + "injectionNominalCapacity", + "withdrawalNominalCapacity", + "reservoirCapacity", + "efficiency", + "initialLevel", + "initialLevelOptim", + } + + # Prepare data for short-term storage tests + storage_by_country = { + "fr": { + "siemens": { + "name": "Siemens", + "group": "battery", + "injectionNominalCapacity": 1500, + "withdrawalNominalCapacity": 1500, + "reservoirCapacity": 1500, + "initialLevel": 0.5, + "initialLevelOptim": False, + }, + "tesla": { + "name": "Tesla", + "group": "battery", + "injectionNominalCapacity": 1200, + "withdrawalNominalCapacity": 1200, + "reservoirCapacity": 1200, + "initialLevelOptim": True, + }, + }, + "it": { + "storage3": { + "name": "storage3", + "group": "psp_open", + "injectionNominalCapacity": 1234, + "withdrawalNominalCapacity": 1020, + "reservoirCapacity": 1357, + "initialLevel": 1, + "initialLevelOptim": False, + }, + "storage4": { + "name": "storage4", + "group": "psp_open", + "injectionNominalCapacity": 567, + "withdrawalNominalCapacity": 456, + "reservoirCapacity": 500, + "initialLevelOptim": True, + }, + }, + } + for area_id, storages in storage_by_country.items(): + for storage_id, storage in storages.items(): + res = client.post( + f"/v1/studies/{study_id}/areas/{area_id}/storages", + headers=user_headers, + json=storage, + ) + res.raise_for_status() + + # Update some generators using the table mode + _fr_siemes_values = {"injectionNominalCapacity": 1550, "withdrawalNominalCapacity": 1550} + _fr_tesla_values = {"efficiency": 0.75, "initialLevel": 0.89, "initialLevelOptim": False} + _it_storage3_values = {"group": "Pondage"} + if study_version >= 880: + _it_storage3_values["enabled"] = False + + res = client.put( + f"/v1/studies/{study_id}/table-mode/st-storages", + headers=user_headers, + json={ + "fr / siemens": _fr_siemes_values, + "fr / tesla": _fr_tesla_values, + "it / storage3": _it_storage3_values, + }, + ) + assert res.status_code == 200, res.json() + actual = res.json() + expected = { + "fr / siemens": { + # "id": "siemens", + # "name": "Siemens", + "efficiency": 1, + "enabled": None, + "group": "Battery", "initialLevel": 0.5, "initialLevelOptim": False, + "injectionNominalCapacity": 1550, + "reservoirCapacity": 1500, + "withdrawalNominalCapacity": 1550, }, - "tesla": { - "name": "Tesla", - "group": "battery", + "fr / tesla": { + # "id": "tesla", + # "name": "Tesla", + "efficiency": 0.75, + "enabled": None, + "group": "Battery", + "initialLevel": 0.89, + "initialLevelOptim": False, "injectionNominalCapacity": 1200, - "withdrawalNominalCapacity": 1200, "reservoirCapacity": 1200, - "initialLevelOptim": True, + "withdrawalNominalCapacity": 1200, }, - }, - "it": { - "storage3": { - "name": "storage3", - "group": "psp_open", - "injectionNominalCapacity": 1234, - "withdrawalNominalCapacity": 1020, - "reservoirCapacity": 1357, + "it / storage3": { + # "id": "storage3", + # "name": "storage3", + "efficiency": 1, + "enabled": None, + "group": "Pondage", "initialLevel": 1, "initialLevelOptim": False, + "injectionNominalCapacity": 1234, + "reservoirCapacity": 1357, + "withdrawalNominalCapacity": 1020, }, - "storage4": { - "name": "storage4", - "group": "psp_open", + "it / storage4": { + # "id": "storage4", + # "name": "storage4", + "efficiency": 1, + "enabled": None, + "group": "PSP_open", + "initialLevel": 0.5, + "initialLevelOptim": True, "injectionNominalCapacity": 567, + "reservoirCapacity": 500, "withdrawalNominalCapacity": 456, + }, + } + 
+ if study_version >= 880: + for key in expected: + expected[key]["enabled"] = True + expected["it / storage3"]["enabled"] = False + + assert actual == expected + + res = client.get( + f"/v1/studies/{study_id}/table-mode/st-storages", + headers=user_headers, + params={ + "columns": ",".join( + [ + "group", + "injectionNominalCapacity", + "withdrawalNominalCapacity", + "reservoirCapacity", + "unknowColumn", # should be ignored + ] + ), + }, + ) + assert res.status_code == 200, res.json() + expected = { + "fr / siemens": { + "group": "Battery", + "injectionNominalCapacity": 1550, + "reservoirCapacity": 1500, + "withdrawalNominalCapacity": 1550, + }, + "fr / tesla": { + "group": "Battery", + "injectionNominalCapacity": 1200, + "reservoirCapacity": 1200, + "withdrawalNominalCapacity": 1200, + }, + "it / storage3": { + "group": "Pondage", + "injectionNominalCapacity": 1234, + "reservoirCapacity": 1357, + "withdrawalNominalCapacity": 1020, + }, + "it / storage4": { + "group": "PSP_open", + "injectionNominalCapacity": 567, "reservoirCapacity": 500, - "initialLevelOptim": True, + "withdrawalNominalCapacity": 456, }, - }, - } - for area_id, storages in storage_by_country.items(): - for storage_id, storage in storages.items(): - res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/storages", - headers=user_headers, - json=storage, - ) - res.raise_for_status() - - # Update some generators using the table mode - res = client.put( - f"/v1/studies/{study_id}/table-mode/st-storages", - headers=user_headers, - json={ - "fr / siemens": {"injectionNominalCapacity": 1550, "withdrawalNominalCapacity": 1550}, - "fr / tesla": {"efficiency": 0.75, "initialLevel": 0.89, "initialLevelOptim": False}, - "it / storage3": {"group": "Pondage"}, - }, - ) - assert res.status_code == 200, res.json() - actual = res.json() - assert actual == { - "fr / siemens": { - # "id": "siemens", - # "name": "Siemens", - "efficiency": 1, - "enabled": None, - "group": "Battery", - "initialLevel": 0.5, - "initialLevelOptim": False, - "injectionNominalCapacity": 1550, - "reservoirCapacity": 1500, - "withdrawalNominalCapacity": 1550, - }, - "fr / tesla": { - # "id": "tesla", - # "name": "Tesla", - "efficiency": 0.75, - "enabled": None, - "group": "Battery", - "initialLevel": 0.89, - "initialLevelOptim": False, - "injectionNominalCapacity": 1200, - "reservoirCapacity": 1200, - "withdrawalNominalCapacity": 1200, - }, - "it / storage3": { - # "id": "storage3", - # "name": "storage3", - "efficiency": 1, - "enabled": None, - "group": "Pondage", - "initialLevel": 1, - "initialLevelOptim": False, - "injectionNominalCapacity": 1234, - "reservoirCapacity": 1357, - "withdrawalNominalCapacity": 1020, - }, - "it / storage4": { - # "id": "storage4", - # "name": "storage4", - "efficiency": 1, - "enabled": None, - "group": "PSP_open", - "initialLevel": 0.5, - "initialLevelOptim": True, - "injectionNominalCapacity": 567, - "reservoirCapacity": 500, - "withdrawalNominalCapacity": 456, - }, - } - - res = client.get( - f"/v1/studies/{study_id}/table-mode/st-storages", - headers=user_headers, - params={ - "columns": ",".join( - [ - "group", - "injectionNominalCapacity", - "withdrawalNominalCapacity", - "reservoirCapacity", - "unknowColumn", # should be ignored - ] - ), - }, - ) - assert res.status_code == 200, res.json() - expected = { - "fr / siemens": { - "group": "Battery", - "injectionNominalCapacity": 1550, - "reservoirCapacity": 1500, - "withdrawalNominalCapacity": 1550, - }, - "fr / tesla": { - "group": "Battery", - "injectionNominalCapacity": 
1200, - "reservoirCapacity": 1200, - "withdrawalNominalCapacity": 1200, - }, - "it / storage3": { - "group": "Pondage", - "injectionNominalCapacity": 1234, - "reservoirCapacity": 1357, - "withdrawalNominalCapacity": 1020, - }, - "it / storage4": { - "group": "PSP_open", - "injectionNominalCapacity": 567, - "reservoirCapacity": 500, - "withdrawalNominalCapacity": 456, - }, - } - actual = res.json() - assert actual == expected + } + actual = res.json() + assert actual == expected # Table Mode - Binding Constraints # ================================ @@ -819,7 +842,7 @@ def test_lifecycle__nominal( "id", "name", # Binding Constraints fields - "group", + "group", # since v8.7 "enabled", "timeStep", "operator", @@ -831,34 +854,46 @@ def test_lifecycle__nominal( } # Update some binding constraints using the table mode + _bc1_values = {"comments": "Hello World!", "enabled": True} + _bc2_values = {"filterSynthesis": "hourly", "filterYearByYear": "hourly", "operator": "both"} + if study_version >= 870: + _bc2_values["group"] = "My BC Group" + res = client.put( f"/v1/studies/{study_id}/table-mode/binding-constraints", headers=user_headers, json={ - "binding constraint 1": {"comments": "Hello World!", "enabled": True}, - "binding constraint 2": {"filterSynthesis": "hourly", "filterYearByYear": "hourly", "operator": "both"}, + "binding constraint 1": _bc1_values, + "binding constraint 2": _bc2_values, }, ) assert res.status_code == 200, res.json() actual = res.json() - assert actual == { + expected_binding = { "binding constraint 1": { "comments": "Hello World!", "enabled": True, - "filterSynthesis": "", - "filterYearByYear": "", "operator": "less", "timeStep": "hourly", }, "binding constraint 2": { "comments": "This is a binding constraint", "enabled": False, - "filterSynthesis": "hourly", - "filterYearByYear": "hourly", "operator": "both", "timeStep": "daily", }, } + if study_version >= 830: + expected_binding["binding constraint 1"]["filterSynthesis"] = "" + expected_binding["binding constraint 1"]["filterYearByYear"] = "" + expected_binding["binding constraint 2"]["filterSynthesis"] = "hourly" + expected_binding["binding constraint 2"]["filterYearByYear"] = "hourly" + + if study_version >= 870: + expected_binding["binding constraint 1"]["group"] = "default" + expected_binding["binding constraint 2"]["group"] = "My BC Group" + + assert actual == expected_binding res = client.get( f"/v1/studies/{study_id}/table-mode/binding-constraints", @@ -866,24 +901,7 @@ def test_lifecycle__nominal( params={"columns": ""}, ) assert res.status_code == 200, res.json() - expected = { - "binding constraint 1": { - "comments": "Hello World!", - "enabled": True, - "filterSynthesis": "", - "filterYearByYear": "", - "operator": "less", - "timeStep": "hourly", - }, - "binding constraint 2": { - "comments": "This is a binding constraint", - "enabled": False, - "filterSynthesis": "hourly", - "filterYearByYear": "hourly", - "operator": "both", - "timeStep": "daily", - }, - } + expected = expected_binding actual = res.json() assert actual == expected From c9cfd3d858c43a277027855e010904662dc3a0bc Mon Sep 17 00:00:00 2001 From: Hatim Dinia <33469289+hdinia@users.noreply.github.com> Date: Mon, 13 May 2024 16:40:13 +0200 Subject: [PATCH 136/147] fix(bc): handle undefined v8.3 fields (#2026) --- .../BindingConstView/ConstraintFields.tsx | 2 +- .../BindingConstraints/BindingConstView/utils.ts | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git 
a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintFields.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintFields.tsx index 88fe3f2acc..3405540a6e 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintFields.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintFields.tsx @@ -115,7 +115,7 @@ function Fields({ study, constraintId }: Props) { control={control} sx={{ maxWidth: 150 }} /> - {Number(study.version) >= 840 && ( + {Number(study.version) >= 830 && ( Date: Mon, 13 May 2024 16:48:47 +0200 Subject: [PATCH 137/147] fix(ui-common): allow only import of TSV file in MatrixInput (#2027) --- webapp/src/components/common/MatrixInput/index.tsx | 3 +++ .../src/components/common/dialogs/ImportDialog.tsx | 12 +++++++++--- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/webapp/src/components/common/MatrixInput/index.tsx b/webapp/src/components/common/MatrixInput/index.tsx index fdeb92749f..f2250bd3bc 100644 --- a/webapp/src/components/common/MatrixInput/index.tsx +++ b/webapp/src/components/common/MatrixInput/index.tsx @@ -205,6 +205,9 @@ function MatrixInput({ dropzoneText={t("matrix.message.importHint")} onClose={() => setOpenImportDialog(false)} onImport={handleImport} + accept={{ + "text/tsv": [".tsv"], + }} /> )} {openMatrixAsignDialog && ( diff --git a/webapp/src/components/common/dialogs/ImportDialog.tsx b/webapp/src/components/common/dialogs/ImportDialog.tsx index a5e0402edd..e4aec2498f 100644 --- a/webapp/src/components/common/dialogs/ImportDialog.tsx +++ b/webapp/src/components/common/dialogs/ImportDialog.tsx @@ -1,7 +1,7 @@ import { useEffect, useState } from "react"; import * as R from "ramda"; import { Box, LinearProgress, Paper, Typography } from "@mui/material"; -import Dropzone from "react-dropzone"; +import Dropzone, { type Accept } from "react-dropzone"; import { useMountedState } from "react-use"; import { useTranslation } from "react-i18next"; import BasicDialog, { BasicDialogProps } from "./BasicDialog"; @@ -10,6 +10,7 @@ interface Props { open: BasicDialogProps["open"]; title?: string; dropzoneText?: string; + accept?: Accept; onClose: VoidFunction; onImport: ( file: File, @@ -18,7 +19,7 @@ interface Props { } function ImportDialog(props: Props) { - const { open, title, dropzoneText, onClose, onImport } = props; + const { open, title, dropzoneText, accept, onClose, onImport } = props; const [t] = useTranslation(); const [isUploading, setIsUploading] = useState(false); const [uploadProgress, setUploadProgress] = useState(-1); @@ -92,7 +93,12 @@ function ImportDialog(props: Props) { value={uploadProgress} /> ) : ( - + {({ getRootProps, getInputProps }) => (
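
A minimal usage sketch of the optional `accept` prop introduced in the patch above — an illustration only, not part of the patch series. The `Accept` shape (a MIME-type to file-extension mapping) and the TSV value mirror what the diff itself imports from react-dropzone and passes from `MatrixInput`; the import path, the MUI `Button`, and the progress-callback parameter of `onImport` are assumptions, since the hunk truncates that signature.

    // Sketch: restricting an ImportDialog to TSV uploads via the new `accept` prop.
    import { useState } from "react";
    import Button from "@mui/material/Button";
    import type { Accept } from "react-dropzone";
    import ImportDialog from "./ImportDialog"; // path assumed for this sketch

    // Same MIME-type -> extensions mapping that MatrixInput passes above.
    const TSV_ONLY: Accept = { "text/tsv": [".tsv"] };

    function TsvImportButton() {
      const [open, setOpen] = useState(false);

      // The progress-callback parameter is assumed; the patch only shows
      // `file: File` before the hunk is cut off.
      const handleImport = async (
        file: File,
        onUploadProgress?: (progress: number) => void,
      ) => {
        // Placeholder for the real upload call using `file`.
        console.info("importing", file.name);
        onUploadProgress?.(100);
      };

      return (
        <>
          <Button onClick={() => setOpen(true)}>Import TSV</Button>
          <ImportDialog
            open={open}
            title="Import matrix"
            dropzoneText="Drop a TSV file here"
            accept={TSV_ONLY}
            onClose={() => setOpen(false)}
            onImport={handleImport}
          />
        </>
      );
    }

Because `accept` is optional, callers that omit it keep the previous behaviour: the dropzone accepts any file type.
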
From 3e6a064999c7d64a0f083984c9d4435e2a213521 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Mon, 13 May 2024 22:22:32 +0200 Subject: [PATCH 138/147] fix(download): improve performance of Excel file download --- antarest/study/web/raw_studies_blueprint.py | 62 ++++++++++++++++++++- requirements.txt | 5 +- 2 files changed, 62 insertions(+), 5 deletions(-) diff --git a/antarest/study/web/raw_studies_blueprint.py b/antarest/study/web/raw_studies_blueprint.py index 65becf59a2..2ce44c5d48 100644 --- a/antarest/study/web/raw_studies_blueprint.py +++ b/antarest/study/web/raw_studies_blueprint.py @@ -25,6 +25,12 @@ from antarest.study.service import StudyService from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixFrequency +try: + import tables # type: ignore + import xlsxwriter # type: ignore +except ImportError: + raise ImportError("The 'xlsxwriter' and 'tables' packages are required") from None + logger = logging.getLogger(__name__) @@ -69,7 +75,10 @@ class TableExportFormat(EnumIgnoreCase): """Export format for tables.""" XLSX = "xlsx" + HDF5 = "hdf5" TSV = "tsv" + CSV = "csv" + CSV_SEMICOLON = "csv (semicolon)" def __str__(self) -> str: """Return the format as a string for display.""" @@ -83,6 +92,10 @@ def media_type(self) -> str: return "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" elif self == TableExportFormat.TSV: return "text/tab-separated-values" + elif self in (TableExportFormat.CSV, TableExportFormat.CSV_SEMICOLON): + return "text/csv" + elif self == TableExportFormat.HDF5: + return "application/x-hdf5" else: # pragma: no cover raise NotImplementedError(f"Export format '{self}' is not implemented") @@ -93,6 +106,10 @@ def suffix(self) -> str: return ".xlsx" elif self == TableExportFormat.TSV: return ".tsv" + elif self in (TableExportFormat.CSV, TableExportFormat.CSV_SEMICOLON): + return ".csv" + elif self == TableExportFormat.HDF5: + return ".h5" else: # pragma: no cover raise NotImplementedError(f"Export format '{self}' is not implemented") @@ -106,9 +123,45 @@ def export_table( ) -> None: """Export a table to a file in the given format.""" if self == TableExportFormat.XLSX: - return df.to_excel(export_path, index=with_index, header=with_header, engine="openpyxl") + return df.to_excel( + export_path, + index=with_index, + header=with_header, + engine="xlsxwriter", + ) elif self == TableExportFormat.TSV: - return df.to_csv(export_path, sep="\t", index=with_index, header=with_header, float_format="%.6f") + return df.to_csv( + export_path, + sep="\t", + index=with_index, + header=with_header, + float_format="%.6f", + ) + elif self == TableExportFormat.CSV: + return df.to_csv( + export_path, + sep=",", + index=with_index, + header=with_header, + float_format="%.6f", + ) + elif self == TableExportFormat.CSV_SEMICOLON: + return df.to_csv( + export_path, + sep=";", + decimal=",", + index=with_index, + header=with_header, + float_format="%.6f", + ) + elif self == TableExportFormat.HDF5: + return df.to_hdf( + export_path, + key="data", + mode="w", + format="table", + data_columns=True, + ) else: # pragma: no cover raise NotImplementedError(f"Export format '{self}' is not implemented") @@ -464,7 +517,10 @@ def get_matrix( return FileResponse( export_path, - headers={"Content-Disposition": f'attachment; filename="{export_file_download.filename}"'}, + headers={ + "Content-Disposition": f'attachment; filename="{export_file_download.filename}"', + "Content-Type": f"{export_format.media_type}; charset=utf-8", + }, 
media_type=export_format.media_type, ) diff --git a/requirements.txt b/requirements.txt index 40373fbc18..835af45e10 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,7 +13,6 @@ Jinja2~=3.0.3 jsonref~=0.2 MarkupSafe~=2.0.1 numpy~=1.22.1 -openpyxl~=3.1.2 pandas~=1.4.0 paramiko~=2.12.0 plyer~=2.0.0 @@ -28,5 +27,7 @@ redis~=4.1.2 requests~=2.27.1 SQLAlchemy~=1.4.46 starlette~=0.17.1 +tables typing_extensions~=4.7.1 -uvicorn[standard]~=0.15.0 \ No newline at end of file +uvicorn[standard]~=0.15.0 +xlsxwriter~=3.2.0 From 4eb10ba8cfbceb769a2edca073155c94090e1f09 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Mon, 13 May 2024 20:37:17 +0200 Subject: [PATCH 139/147] build: new minor release v2.17 (2024-05-15) --- antarest/__init__.py | 4 ++-- docs/CHANGELOG.md | 30 ++++++++++++++++++++++++++++++ scripts/package_antares_web.sh | 2 +- setup.py | 2 +- sonar-project.properties | 2 +- webapp/package-lock.json | 4 ++-- webapp/package.json | 2 +- 7 files changed, 38 insertions(+), 8 deletions(-) diff --git a/antarest/__init__.py b/antarest/__init__.py index f4fae4cd35..9895b016bd 100644 --- a/antarest/__init__.py +++ b/antarest/__init__.py @@ -7,9 +7,9 @@ # Standard project metadata -__version__ = "2.16.8" +__version__ = "2.17" __author__ = "RTE, Antares Web Team" -__date__ = "2024-04-19" +__date__ = "2024-05-15" # noinspection SpellCheckingInspection __credits__ = "(c) Réseau de Transport de l’Électricité (RTE)" diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 3ab8134941..8b4933a3eb 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -1,6 +1,36 @@ Antares Web Changelog ===================== +v2.17 (2024-05-15) +------------------ + +Support for evolutions relating to studies in versions 8.7: +- Scenarized RHS for binding constraints, +- Thermal cluster new properties (cost generation mode, efficiency, variable OM cost) + +Support for evolutions relating to studies in versions 8.8: +- Short-term storage¶: add `enabled` property +- Experimental "MILP" mode (using launcher options) + +### Features + +* **bc:** add endpoint for multiple terms edition [`#2020`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2020) +* **table-mode:** add missing properties for v8.6 and 8.7 [`#1643`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1643) +* **ui-table-mode:** translate table types in add/edit modal + + +### Bug Fixes + +* **bc:** handle undefined v8.3 fields [`#2026`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2026) +* **table-mode:** hide `adequacy_patch_mode` column from table-mode before v8.3 [`#2022`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2022) +* **ui-common:** allow only import of TSV file in `MatrixInput` [`#2027`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2027) +* **ui-settings:** prevent false duplicates on group form updates [`#1998`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1998) +* **ui-table-mode:** reset 'column' field when 'type' field change in create/update modal +* **ui-table-mode:** unable to edit tables with old types +* **ui-table-mode:** add missing "ST Storage" in Table Mode template [`#2016`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2016) +* **download**: improve performance of Excel file download + + v2.16.8 (2024-04-19) -------------------- diff --git a/scripts/package_antares_web.sh b/scripts/package_antares_web.sh index 21008c15f8..8b1999cfb9 100755 --- a/scripts/package_antares_web.sh +++ b/scripts/package_antares_web.sh @@ -9,7 +9,7 @@ set -e ANTARES_SOLVER_VERSION="8.8" 
-ANTARES_SOLVER_FULL_VERSION="8.8.3" +ANTARES_SOLVER_FULL_VERSION="8.8.4" ANTARES_SOLVER_VERSION_INT="880" SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd -P) diff --git a/setup.py b/setup.py index ce3b1690ea..76e6c3becb 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name="AntaREST", - version="2.16.8", + version="2.17", description="Antares Server", long_description=Path("README.md").read_text(encoding="utf-8"), long_description_content_type="text/markdown", diff --git a/sonar-project.properties b/sonar-project.properties index ed0c9da1c1..56212f62f5 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -6,5 +6,5 @@ sonar.exclusions=antarest/gui.py,antarest/main.py sonar.python.coverage.reportPaths=coverage.xml sonar.python.version=3.8 sonar.javascript.lcov.reportPaths=webapp/coverage/lcov.info -sonar.projectVersion=2.16.8 +sonar.projectVersion=2.17 sonar.coverage.exclusions=antarest/gui.py,antarest/main.py,antarest/singleton_services.py,antarest/worker/archive_worker_service.py,webapp/**/* \ No newline at end of file diff --git a/webapp/package-lock.json b/webapp/package-lock.json index 4eac54256e..10711529c7 100644 --- a/webapp/package-lock.json +++ b/webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "antares-web", - "version": "2.16.8", + "version": "2.17", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "antares-web", - "version": "2.16.8", + "version": "2.17", "dependencies": { "@emotion/react": "11.11.1", "@emotion/styled": "11.11.0", diff --git a/webapp/package.json b/webapp/package.json index 0f455c639b..106345bf84 100644 --- a/webapp/package.json +++ b/webapp/package.json @@ -1,6 +1,6 @@ { "name": "antares-web", - "version": "2.16.8", + "version": "2.17", "private": true, "type": "module", "scripts": { From 77f56cbf4f88e8ed733f4b3b57a7e668679bbecc Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Mon, 13 May 2024 22:45:51 +0200 Subject: [PATCH 140/147] test: correct test_download_matrices.py --- requirements-test.txt | 6 +++++- .../raw_studies_blueprint/test_download_matrices.py | 4 +++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/requirements-test.txt b/requirements-test.txt index 10e44592a1..8e408b2677 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,4 +1,8 @@ -r requirements.txt checksumdir~=1.2.0 pytest~=6.2.5 -pytest-cov~=4.0.0 \ No newline at end of file +pytest-cov~=4.0.0 + +# In this version DataFrame conversion to Excel is done using 'xlsxwriter' library. +# But Excel files reading is done using 'openpyxl' library, during testing only. 
+openpyxl~=3.1.2 \ No newline at end of file diff --git a/tests/integration/raw_studies_blueprint/test_download_matrices.py b/tests/integration/raw_studies_blueprint/test_download_matrices.py index ca2c501374..0fcf2683c5 100644 --- a/tests/integration/raw_studies_blueprint/test_download_matrices.py +++ b/tests/integration/raw_studies_blueprint/test_download_matrices.py @@ -160,7 +160,9 @@ def test_download_matrices(self, client: TestClient, user_access_token: str, stu ) assert res.status_code == 200 # noinspection SpellCheckingInspection - assert res.headers["content-type"] == "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" + assert res.headers["content-type"] == ( + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet; charset=utf-8" + ) # load into dataframe # noinspection PyTypeChecker From 98fc18d345093507b5d2808e8119ee2e05bea5f8 Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 14 May 2024 04:02:20 +0200 Subject: [PATCH 141/147] chore: correct invalid escape sequence `\s` in regex --- antarest/study/business/advanced_parameters_management.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/antarest/study/business/advanced_parameters_management.py b/antarest/study/business/advanced_parameters_management.py index 6cd0825322..f18e47a71f 100644 --- a/antarest/study/business/advanced_parameters_management.py +++ b/antarest/study/business/advanced_parameters_management.py @@ -91,7 +91,7 @@ def check_accuracy_on_correlation(cls, v: str) -> str: return "" allowed_values = ["wind", "load", "solar"] - values_list = re.split("\s*,\s*", v.strip()) + values_list = re.split(r"\s*,\s*", v.strip()) if len(values_list) != len(set(values_list)): raise ValueError("Duplicate value") From aae515cdafd1cd46795dbfd0763193e90f7c3ddd Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 14 May 2024 04:10:15 +0200 Subject: [PATCH 142/147] test: class `TestSubNode` is renamed `CheckSubNode` to avoid warning during pytest classes collection --- tests/storage/repository/filesystem/test_folder_node.py | 6 +++--- tests/storage/repository/filesystem/utils.py | 5 +++-- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/storage/repository/filesystem/test_folder_node.py b/tests/storage/repository/filesystem/test_folder_node.py index d08360e223..7927927d7e 100644 --- a/tests/storage/repository/filesystem/test_folder_node.py +++ b/tests/storage/repository/filesystem/test_folder_node.py @@ -13,7 +13,7 @@ from antarest.study.storage.rawstudy.model.filesystem.inode import INode from antarest.study.storage.rawstudy.model.filesystem.raw_file_node import RawFileNode from antarest.study.storage.rawstudy.model.filesystem.root.input.areas.list import InputAreasList -from tests.storage.repository.filesystem.utils import TestMiddleNode, TestSubNode +from tests.storage.repository.filesystem.utils import CheckSubNode, TestMiddleNode def build_tree() -> INode[t.Any, t.Any, t.Any]: @@ -24,8 +24,8 @@ def build_tree() -> INode[t.Any, t.Any, t.Any]: context=Mock(), config=config, children={ - "input": TestSubNode(value=100), - "output": TestSubNode(value=200), + "input": CheckSubNode(config, value=100), + "output": CheckSubNode(config, value=200), }, ) diff --git a/tests/storage/repository/filesystem/utils.py b/tests/storage/repository/filesystem/utils.py index 82e0107082..abef9e26e5 100644 --- a/tests/storage/repository/filesystem/utils.py +++ b/tests/storage/repository/filesystem/utils.py @@ -8,7 +8,7 @@ from 
antarest.study.storage.rawstudy.model.filesystem.inode import TREE, INode -class TestSubNode(INode[int, int, int]): +class CheckSubNode(INode[int, int, int]): def normalize(self) -> None: pass @@ -18,7 +18,8 @@ def denormalize(self) -> None: def build(self, config: FileStudyTreeConfig) -> "TREE": pass - def __init__(self, value: int): + def __init__(self, config: FileStudyTreeConfig, value: int): + super().__init__(config) self.value = value def get_node( From 4f6da593090d05fdcee6cdd10b567ad916fe9f5b Mon Sep 17 00:00:00 2001 From: Laurent LAPORTE Date: Tue, 14 May 2024 04:26:32 +0200 Subject: [PATCH 143/147] chore: use `copy_on_model_validation="deep"` in `ICommand` to avoid deprecation warning --- antarest/study/storage/variantstudy/model/command/icommand.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/antarest/study/storage/variantstudy/model/command/icommand.py b/antarest/study/storage/variantstudy/model/command/icommand.py index 72eb6bfa02..1c5c704320 100644 --- a/antarest/study/storage/variantstudy/model/command/icommand.py +++ b/antarest/study/storage/variantstudy/model/command/icommand.py @@ -19,7 +19,7 @@ logger = logging.getLogger(__name__) -class ICommand(ABC, BaseModel, extra=Extra.forbid, arbitrary_types_allowed=True): +class ICommand(ABC, BaseModel, extra=Extra.forbid, arbitrary_types_allowed=True, copy_on_model_validation="deep"): """ Interface for all commands that can be applied to a study. From 63eda4adc95c3c82f577d1e33113bbe4422dbfe6 Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Thu, 16 May 2024 12:39:13 +0200 Subject: [PATCH 144/147] feat(ui-utils): add overload signatures for `validateNumber` function in validationUtils --- webapp/src/utils/validationUtils.ts | 44 ++++++++++++++++++++++++----- 1 file changed, 37 insertions(+), 7 deletions(-) diff --git a/webapp/src/utils/validationUtils.ts b/webapp/src/utils/validationUtils.ts index 5b3f60091d..5760f285ae 100644 --- a/webapp/src/utils/validationUtils.ts +++ b/webapp/src/utils/validationUtils.ts @@ -4,6 +4,11 @@ import { t } from "i18next"; // Types //////////////////////////////////////////////////////////////// +interface NumberValidationOptions { + min?: number; + max?: number; +} + interface ValidationOptions { existingValues?: string[]; excludedValues?: string[]; @@ -153,22 +158,47 @@ export function validatePassword(password: string): string | true { /** * Validates a number against specified numerical limits. * + * @example + * validateNumber(5, { min: 0, max: 10 }); // true + * validateNumber(9, { min: 10, max: 20 }); // Error message + * + * + * @example With currying. + * const fn = validateNumber({ min: 0, max: 10 }); + * fn(5); // true + * fn(11); // Error message + * * @param value - The number to validate. - * @param options - Configuration options for validation including min and max values. (Optional) - * @param [options.min=Number.MIN_SAFE_INTEGER] - Minimum allowed value for the number. - * @param [options.max=Number.MAX_SAFE_INTEGER] - Maximum allowed value for the number. + * @param [options] - Configuration options for validation. + * @param [options.min=Number.MIN_SAFE_INTEGER] - Minimum allowed value. + * @param [options.max=Number.MAX_SAFE_INTEGER] - Maximum allowed value. * @returns True if validation is successful, or a localized error message if it fails. 
*/ export function validateNumber( value: number, - options?: ValidationOptions, -): string | true { - if (typeof value !== "number" || isNaN(value) || !isFinite(value)) { + options?: NumberValidationOptions, +): string | true; + +export function validateNumber( + options?: NumberValidationOptions, +): (value: number) => string | true; + +export function validateNumber( + valueOrOpts?: number | NumberValidationOptions, + options: NumberValidationOptions = {}, +): (string | true) | ((value: number) => string | true) { + if (typeof valueOrOpts !== "number") { + return (v: number) => validateNumber(v, valueOrOpts); + } + + const value = valueOrOpts; + + if (!isFinite(value)) { return t("form.field.invalidNumber", { value }); } const { min = Number.MIN_SAFE_INTEGER, max = Number.MAX_SAFE_INTEGER } = - options || {}; + options; if (value < min) { return t("form.field.minValue", { 0: min }); From 430d88bad3f488162fc08a298195a9d20c6d4e2c Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Wed, 15 May 2024 14:07:19 +0200 Subject: [PATCH 145/147] style(ui-thermals): use curry version of `validateNumber` --- .../Modelization/Areas/Thermal/Fields.tsx | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx index 15f67cdb41..b553152ab8 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx @@ -74,7 +74,7 @@ function Fields() { name="unitCount" control={control} rules={{ - validate: (v) => validateNumber(v, { min: 1 }), + validate: validateNumber({ min: 1 }), setValueAs: Math.floor, }} /> @@ -83,7 +83,7 @@ function Fields() { name="nominalCapacity" control={control} rules={{ - validate: (v) => validateNumber(v, { min: 0 }), + validate: validateNumber({ min: 0 }), }} /> validateNumber(v, { min: 0, max: 100 }), + validate: validateNumber({ min: 0, max: 100 }), }} /> validateNumber(v, { min: 1, max: 168 }), + validate: validateNumber({ min: 1, max: 168 }), setValueAs: Math.floor, }} /> @@ -113,7 +113,7 @@ function Fields() { name="minDownTime" control={control} rules={{ - validate: (v) => validateNumber(v, { min: 1, max: 168 }), + validate: validateNumber({ min: 1, max: 168 }), setValueAs: Math.floor, }} /> @@ -134,7 +134,7 @@ function Fields() { name="efficiency" control={control} rules={{ - validate: (v) => validateNumber(v, { min: 0 }), + validate: validateNumber({ min: 0 }), }} disabled={!isTSCost} /> @@ -143,7 +143,7 @@ function Fields() { name="marginalCost" control={control} rules={{ - validate: (v) => validateNumber(v, { min: 0 }), + validate: validateNumber({ min: 0 }), }} /> @@ -152,7 +152,7 @@ function Fields() { name="startupCost" control={control} rules={{ - validate: (v) => validateNumber(v, { min: 0 }), + validate: validateNumber({ min: 0 }), }} /> validateNumber(v, { min: 0 }), + validate: validateNumber({ min: 0 }), }} /> validateNumber(v, { min: 0 }), + validate: validateNumber({ min: 0 }), }} /> validateNumber(v, { min: 0 }), + validate: validateNumber({ min: 0 }), }} disabled={!isTSCost} /> @@ -196,7 +196,7 @@ function Fields() { name={name} control={control} rules={{ - validate: (v) => validateNumber(v, { min: 0 }), + validate: validateNumber({ min: 0 }), }} /> ), @@ -218,7 +218,7 @@ function Fields() { 
name="volatilityForced" control={control} rules={{ - validate: (v) => validateNumber(v, { min: 0, max: 1 }), + validate: validateNumber({ min: 0, max: 1 }), }} inputProps={{ step: 0.1 }} /> @@ -227,7 +227,7 @@ function Fields() { name="volatilityPlanned" control={control} rules={{ - validate: (v) => validateNumber(v, { min: 0, max: 1 }), + validate: validateNumber({ min: 0, max: 1 }), }} inputProps={{ step: 0.1 }} /> From 6afb77cfd697b901073e0fc979a52cd50e22e05b Mon Sep 17 00:00:00 2001 From: Samir Kamal <1954121+skamril@users.noreply.github.com> Date: Thu, 16 May 2024 12:18:17 +0200 Subject: [PATCH 146/147] fix(ui-utils): change error message for string length in validationUtils --- webapp/public/locales/en/main.json | 3 +- webapp/public/locales/fr/main.json | 3 +- .../BindingConstraints/AddDialog.tsx | 2 +- .../BindingConstView/ConstraintFields.tsx | 2 +- webapp/src/utils/validationUtils.ts | 28 +++++++++---------- 5 files changed, 20 insertions(+), 18 deletions(-) diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 49ed4ce172..ccfac647f2 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -123,7 +123,8 @@ "form.asyncDefaultValues.error": "Failed to get values", "form.field.required": "Field required", "form.field.duplicate": "Value already exists", - "form.field.minLength": "{{0}} character(s) minimum", + "form.field.minLength": "{{length}} character(s) minimum", + "form.field.maxLength": "{{length}} character(s) maximum", "form.field.minValue": "The minimum value is {{0}}", "form.field.maxValue": "The maximum value is {{0}}", "form.field.invalidNumber": "Invalid number", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index 21085ed4bd..ef12be54ec 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -123,7 +123,8 @@ "form.asyncDefaultValues.error": "Impossible d'obtenir les valeurs", "form.field.required": "Champ requis", "form.field.duplicate": "Cette valeur existe déjà", - "form.field.minLength": "{{0}} caractère(s) minimum", + "form.field.minLength": "{{length}} caractère(s) minimum", + "form.field.maxLength": "{{length}} caractère(s) maximum", "form.field.minValue": "La valeur minimum est {{0}}", "form.field.maxValue": "La valeur maximum est {{0}}", "form.field.invalidNumber": "Nombre invalide", diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx index f1af440491..6891123b8c 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/AddDialog.tsx @@ -157,7 +157,7 @@ function AddDialog({ rules={{ validate: (v) => validateString(v, { - max: 20, + maxLength: 20, specialChars: "-", }), }} diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintFields.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintFields.tsx index 3405540a6e..7677a78c4f 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintFields.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintFields.tsx @@ -90,7 +90,7 @@ function Fields({ study, constraintId }: 
Props) { rules={{ validate: (v) => validateString(v, { - max: 20, + maxLength: 20, specialChars: "-", }), }} diff --git a/webapp/src/utils/validationUtils.ts b/webapp/src/utils/validationUtils.ts index 5760f285ae..86fb294d8b 100644 --- a/webapp/src/utils/validationUtils.ts +++ b/webapp/src/utils/validationUtils.ts @@ -9,7 +9,7 @@ interface NumberValidationOptions { max?: number; } -interface ValidationOptions { +interface StringValidationOptions { existingValues?: string[]; excludedValues?: string[]; isCaseSensitive?: boolean; @@ -17,8 +17,8 @@ interface ValidationOptions { specialChars?: string; allowSpaces?: boolean; editedValue?: string; - min?: number; - max?: number; + minLength?: number; + maxLength?: number; } //////////////////////////////////////////////////////////////// @@ -40,13 +40,13 @@ interface ValidationOptions { * @param [options.specialChars="&()_-"] - A string representing additional allowed characters outside the typical alphanumeric scope. * @param [options.allowSpaces=true] - Flags if spaces are allowed in the value. * @param [options.editedValue=""] - The current value being edited, to exclude it from duplicate checks. - * @param [options.min=0] - Minimum length required for the string. Defaults to 0. - * @param [options.max=255] - Maximum allowed length for the string. Defaults to 255. + * @param [options.minLength=0] - Minimum length required for the string. Defaults to 0. + * @param [options.maxLength=255] - Maximum allowed length for the string. Defaults to 255. * @returns True if validation is successful, or a localized error message if it fails. */ export function validateString( value: string, - options?: ValidationOptions, + options?: StringValidationOptions, ): string | true { const { existingValues = [], @@ -56,8 +56,8 @@ export function validateString( allowSpaces = true, specialChars = "&()_-", editedValue = "", - min = 0, - max = 255, + minLength = 0, + maxLength = 255, } = options || {}; const trimmedValue = value.trim(); @@ -70,12 +70,12 @@ export function validateString( return t("form.field.spacesNotAllowed"); } - if (trimmedValue.length < min) { - return t("form.field.minValue", { 0: min }); + if (trimmedValue.length < minLength) { + return t("form.field.minLength", { length: minLength }); } - if (trimmedValue.length > max) { - return t("form.field.maxValue", { 0: max }); + if (trimmedValue.length > maxLength) { + return t("form.field.maxLength", { length: maxLength }); } // Compiles a regex pattern based on allowed characters and flags. 
@@ -129,11 +129,11 @@ export function validatePassword(password: string): string | true { } if (trimmedPassword.length < 8) { - return t("form.field.minValue", { 0: 8 }); + return t("form.field.minLength", { length: 8 }); } if (trimmedPassword.length > 50) { - return t("form.field.maxValue", { 0: 50 }); + return t("form.field.maxLength", { length: 50 }); } if (!/[a-z]/.test(trimmedPassword)) { From 428961ea999d6148704c96d4105e248ee86ad5c2 Mon Sep 17 00:00:00 2001 From: MartinBelthle Date: Fri, 17 May 2024 13:10:14 +0200 Subject: [PATCH 147/147] fix(import): allow import for users that are reader only (#2032) --- antarest/study/service.py | 26 +--- tests/integration/test_integration.py | 209 ++++++++++++-------------- 2 files changed, 105 insertions(+), 130 deletions(-) diff --git a/antarest/study/service.py b/antarest/study/service.py index 66b3b2ddad..aab951326b 100644 --- a/antarest/study/service.py +++ b/antarest/study/service.py @@ -38,7 +38,6 @@ from antarest.core.jwt import DEFAULT_ADMIN_USER, JWTGroup, JWTUser from antarest.core.model import JSON, SUB_JSON, PermissionInfo, PublicMode, StudyPermissionType from antarest.core.requests import RequestParameters, UserHasNotPermissionError -from antarest.core.roles import RoleType from antarest.core.tasks.model import TaskListFilter, TaskResult, TaskStatus, TaskType from antarest.core.tasks.service import ITaskService, TaskUpdateNotifier, noop_notifier from antarest.core.utils.fastapi_sqlalchemy import db @@ -104,7 +103,6 @@ StudySortBy, ) from antarest.study.storage.matrix_profile import adjust_matrix_columns_index -from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfigDTO from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode @@ -1396,13 +1394,7 @@ def import_study( study = self.storage_service.raw_study_service.import_study(study, stream) study.updated_at = datetime.utcnow() - # status = self._analyse_study(study) - self._save_study( - study, - owner=params.user, - group_ids=group_ids, - # content_status=status, - ) + self._save_study(study, params.user, group_ids) self.event_bus.push( Event( type=EventType.STUDY_CREATED, @@ -2014,7 +2006,6 @@ def _save_study( study: Study, owner: t.Optional[JWTUser] = None, group_ids: t.Sequence[str] = (), - content_status: StudyContentStatus = StudyContentStatus.VALID, ) -> None: """ Create or update a study with specified attributes. @@ -2026,29 +2017,24 @@ def _save_study( study: The study to be saved or updated. owner: The owner of the study (current authenticated user). group_ids: The list of group IDs to associate with the study. - content_status: The new content status for the study. Raises: UserHasNotPermissionError: - If the owner is not specified or has invalid authentication, - or if permission is denied for any of the specified group IDs. + If the owner or the group role is not specified. 
""" if not owner: raise UserHasNotPermissionError("owner is not specified or has invalid authentication") if isinstance(study, RawStudy): - study.content_status = content_status + study.content_status = StudyContentStatus.VALID study.owner = self.user_service.get_user(owner.impersonator, params=RequestParameters(user=owner)) study.groups.clear() for gid in group_ids: - jwt_group: t.Optional[JWTGroup] = next(filter(lambda g: g.id == gid, owner.groups), None) # type: ignore - if ( - jwt_group is None - or jwt_group.role is None - or (jwt_group.role < RoleType.WRITER and not owner.is_site_admin()) - ): + owned_groups = (g for g in owner.groups if g.id == gid) + jwt_group: t.Optional[JWTGroup] = next(owned_groups, None) + if jwt_group is None or jwt_group.role is None: raise UserHasNotPermissionError(f"Permission denied for group ID: {gid}") study.groups.append(Group(id=jwt_group.id, name=jwt_group.name)) diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index 8c03873992..a5f889afe6 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -19,33 +19,29 @@ from tests.integration.utils import wait_for -def test_main(client: TestClient, admin_access_token: str, study_id: str) -> None: - admin_headers = {"Authorization": f"Bearer {admin_access_token}"} +def test_main(client: TestClient, admin_access_token: str) -> None: + client.headers = {"Authorization": f"Bearer {admin_access_token}"} # create some new users # TODO check for bad username or empty password client.post( "/v1/users", - headers=admin_headers, json={"name": "George", "password": "mypass"}, ) client.post( "/v1/users", - headers=admin_headers, json={"name": "Fred", "password": "mypass"}, ) client.post( "/v1/users", - headers=admin_headers, json={"name": "Harry", "password": "mypass"}, ) - res = client.get("/v1/users", headers=admin_headers) + res = client.get("/v1/users") assert len(res.json()) == 4 # reject user with existing name creation res = client.post( "/v1/users", - headers=admin_headers, json={"name": "George", "password": "mypass"}, ) assert res.status_code == 400 @@ -231,19 +227,16 @@ def test_main(client: TestClient, admin_access_token: str, study_id: str) -> Non # play with groups client.post( "/v1/groups", - headers=admin_headers, json={"name": "Weasley"}, ) - res = client.get("/v1/groups", headers=admin_headers) + res = client.get("/v1/groups") group_id = res.json()[1]["id"] client.post( "/v1/roles", - headers=admin_headers, json={"type": 40, "group_id": group_id, "identity_id": 3}, ) client.post( "/v1/roles", - headers=admin_headers, json={"type": 30, "group_id": group_id, "identity_id": 2}, ) # reset login to update credentials @@ -282,7 +275,7 @@ def test_main(client: TestClient, admin_access_token: str, study_id: str) -> Non ) job_id = res.json()["job_id"] - res = client.get("/v1/launcher/load", headers=admin_headers) + res = client.get("/v1/launcher/load") assert res.status_code == 200, res.json() launcher_load = LauncherLoadDTO(**res.json()) assert launcher_load.allocated_cpu_rate == 100 / (os.cpu_count() or 1) @@ -331,20 +324,19 @@ def test_main(client: TestClient, admin_access_token: str, study_id: str) -> Non assert new_meta.json()["horizon"] == "2035" -def test_matrix(client: TestClient, admin_access_token: str, study_id: str) -> None: - admin_headers = {"Authorization": f"Bearer {admin_access_token}"} +def test_matrix(client: TestClient, admin_access_token: str) -> None: + client.headers = {"Authorization": f"Bearer 
{admin_access_token}"} matrix = [[1, 2], [3, 4]] res = client.post( "/v1/matrix", - headers=admin_headers, json=matrix, ) assert res.status_code == 200 - res = client.get(f"/v1/matrix/{res.json()}", headers=admin_headers) + res = client.get(f"/v1/matrix/{res.json()}") assert res.status_code == 200 stored = res.json() @@ -353,7 +345,7 @@ def test_matrix(client: TestClient, admin_access_token: str, study_id: str) -> N matrix_id = stored["id"] - res = client.get(f"/v1/matrix/{matrix_id}/download", headers=admin_headers) + res = client.get(f"/v1/matrix/{matrix_id}/download") assert res.status_code == 200 res = client.post( @@ -366,30 +358,29 @@ def test_matrix(client: TestClient, admin_access_token: str, study_id: str) -> N }, "matrices": [{"id": matrix_id, "name": "mymatrix"}], }, - headers=admin_headers, ) assert res.status_code == 200 - res = client.get("/v1/matrixdataset/_search?name=myda", headers=admin_headers) + res = client.get("/v1/matrixdataset/_search?name=myda") results = res.json() assert len(results) == 1 assert len(results[0]["matrices"]) == 1 assert results[0]["matrices"][0]["id"] == matrix_id dataset_id = results[0]["id"] - res = client.get(f"/v1/matrixdataset/{dataset_id}/download", headers=admin_headers) + res = client.get(f"/v1/matrixdataset/{dataset_id}/download") assert res.status_code == 200 - res = client.delete(f"/v1/matrixdataset/{dataset_id}", headers=admin_headers) + res = client.delete(f"/v1/matrixdataset/{dataset_id}") assert res.status_code == 200 def test_area_management(client: TestClient, admin_access_token: str) -> None: - admin_headers = {"Authorization": f"Bearer {admin_access_token}"} + client.headers = {"Authorization": f"Bearer {admin_access_token}"} - created = client.post("/v1/studies", headers=admin_headers, params={"name": "foo", "version": 870}) + created = client.post("/v1/studies", params={"name": "foo", "version": 870}) study_id = created.json() - res_areas = client.get(f"/v1/studies/{study_id}/areas", headers=admin_headers) + res_areas = client.get(f"/v1/studies/{study_id}/areas") assert res_areas.json() == [ { "id": "all areas", @@ -403,7 +394,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: res = client.post( f"/v1/studies/{study_id}/areas", - headers=admin_headers, json={ "name": "area 1", "type": "AREA", @@ -415,7 +405,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: # Test area creation with duplicate name res = client.post( f"/v1/studies/{study_id}/areas", - headers=admin_headers, json={ "name": "Area 1", # Same name but with different case "type": "AREA", @@ -430,7 +419,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: client.post( f"/v1/studies/{study_id}/areas", - headers=admin_headers, json={ "name": "area 2", "type": "AREA", @@ -440,7 +428,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: client.post( f"/v1/studies/{study_id}/commands", - headers=admin_headers, json=[ { "action": CommandName.CREATE_THERMAL_CLUSTER.value, @@ -455,7 +442,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: client.post( f"/v1/studies/{study_id}/commands", - headers=admin_headers, json=[ { "action": CommandName.CREATE_THERMAL_CLUSTER.value, @@ -470,7 +456,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: client.post( f"/v1/studies/{study_id}/commands", - headers=admin_headers, json=[ { "action": CommandName.CREATE_RENEWABLES_CLUSTER.value, @@ -485,7 +470,6 
@@ def test_area_management(client: TestClient, admin_access_token: str) -> None: client.post( f"/v1/studies/{study_id}/commands", - headers=admin_headers, json=[ { "action": CommandName.CREATE_RENEWABLES_CLUSTER.value, @@ -500,7 +484,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: res = client.post( f"/v1/studies/{study_id}/commands", - headers=admin_headers, json=[ { "action": CommandName.CREATE_BINDING_CONSTRAINT.value, @@ -518,7 +501,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: res = client.post( f"/v1/studies/{study_id}/commands", - headers=admin_headers, json=[ { "action": CommandName.CREATE_BINDING_CONSTRAINT.value, @@ -534,7 +516,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: ) res.raise_for_status() - res_areas = client.get(f"/v1/studies/{study_id}/areas", headers=admin_headers) + res_areas = client.get(f"/v1/studies/{study_id}/areas") assert res_areas.json() == [ { "id": "area 1", @@ -600,13 +582,12 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: client.post( f"/v1/studies/{study_id}/links", - headers=admin_headers, json={ "area1": "area 1", "area2": "area 2", }, ) - res_links = client.get(f"/v1/studies/{study_id}/links?with_ui=true", headers=admin_headers) + res_links = client.get(f"/v1/studies/{study_id}/links?with_ui=true") assert res_links.json() == [ { "area1": "area 1", @@ -617,64 +598,63 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: # -- `layers` integration tests - res = client.get(f"/v1/studies/{study_id}/layers", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/layers") assert res.json() == [LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).dict()] - res = client.post(f"/v1/studies/{study_id}/layers?name=test", headers=admin_headers) + res = client.post(f"/v1/studies/{study_id}/layers?name=test") assert res.json() == "1" - res = client.get(f"/v1/studies/{study_id}/layers", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/layers") assert res.json() == [ LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).dict(), LayerInfoDTO(id="1", name="test", areas=[]).dict(), ] - res = client.put(f"/v1/studies/{study_id}/layers/1?name=test2", headers=admin_headers) - res = client.put(f"/v1/studies/{study_id}/layers/1", json=["area 1"], headers=admin_headers) - res = client.put(f"/v1/studies/{study_id}/layers/1", json=["area 2"], headers=admin_headers) - res = client.get(f"/v1/studies/{study_id}/layers", headers=admin_headers) + res = client.put(f"/v1/studies/{study_id}/layers/1?name=test2") + res = client.put(f"/v1/studies/{study_id}/layers/1", json=["area 1"]) + res = client.put(f"/v1/studies/{study_id}/layers/1", json=["area 2"]) + res = client.get(f"/v1/studies/{study_id}/layers") assert res.json() == [ LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).dict(), LayerInfoDTO(id="1", name="test2", areas=["area 2"]).dict(), ] # Delete the layer '1' that has 1 area - res = client.delete(f"/v1/studies/{study_id}/layers/1", headers=admin_headers) + res = client.delete(f"/v1/studies/{study_id}/layers/1") assert res.status_code == HTTPStatus.NO_CONTENT # Ensure the layer is deleted - res = client.get(f"/v1/studies/{study_id}/layers", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/layers") assert res.json() == [ LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).dict(), ] # Create the layer again without areas - res 
= client.post(f"/v1/studies/{study_id}/layers?name=test2", headers=admin_headers) + res = client.post(f"/v1/studies/{study_id}/layers?name=test2") assert res.json() == "1" # Delete the layer with no areas - res = client.delete(f"/v1/studies/{study_id}/layers/1", headers=admin_headers) + res = client.delete(f"/v1/studies/{study_id}/layers/1") assert res.status_code == HTTPStatus.NO_CONTENT # Ensure the layer is deleted - res = client.get(f"/v1/studies/{study_id}/layers", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/layers") assert res.json() == [ LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).dict(), ] # Try to delete a non-existing layer - res = client.delete(f"/v1/studies/{study_id}/layers/1", headers=admin_headers) + res = client.delete(f"/v1/studies/{study_id}/layers/1") assert res.status_code == HTTPStatus.NOT_FOUND # Try to delete the layer 'All' - res = client.delete(f"/v1/studies/{study_id}/layers/0", headers=admin_headers) + res = client.delete(f"/v1/studies/{study_id}/layers/0") assert res.status_code == HTTPStatus.BAD_REQUEST # -- `district` integration tests res = client.post( f"/v1/studies/{study_id}/districts", - headers=admin_headers, json={ "name": "District 1", "output": True, @@ -693,7 +673,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: res = client.put( f"/v1/studies/{study_id}/districts/district%201", - headers=admin_headers, json={ "name": "District 1", "output": True, @@ -703,7 +682,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: ) assert res.status_code == 200 - res = client.get(f"/v1/studies/{study_id}/districts", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/districts") assert res.status_code == 200 actual = res.json() actual[0]["areas"].sort() @@ -725,12 +704,12 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: }, ] - res = client.delete(f"/v1/studies/{study_id}/districts/district%201", headers=admin_headers) + res = client.delete(f"/v1/studies/{study_id}/districts/district%201") assert res.status_code == 200 # Optimization form - res_optimization_config = client.get(f"/v1/studies/{study_id}/config/optimization/form", headers=admin_headers) + res_optimization_config = client.get(f"/v1/studies/{study_id}/config/optimization/form") res_optimization_config_json = res_optimization_config.json() assert res_optimization_config_json == { "bindingConstraints": True, @@ -749,14 +728,13 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: client.put( f"/v1/studies/{study_id}/config/optimization/form", - headers=admin_headers, json={ "strategicReserve": False, "unfeasibleProblemBehavior": UnfeasibleProblemBehavior.WARNING_VERBOSE.value, "simplexOptimizationRange": SimplexOptimizationRange.DAY.value, }, ) - res_optimization_config = client.get(f"/v1/studies/{study_id}/config/optimization/form", headers=admin_headers) + res_optimization_config = client.get(f"/v1/studies/{study_id}/config/optimization/form") res_optimization_config_json = res_optimization_config.json() assert res_optimization_config_json == { "bindingConstraints": True, @@ -775,7 +753,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: # Adequacy patch form - res_adequacy_patch_config = client.get(f"/v1/studies/{study_id}/config/adequacypatch/form", headers=admin_headers) + res_adequacy_patch_config = client.get(f"/v1/studies/{study_id}/config/adequacypatch/form") 
res_adequacy_patch_config_json = res_adequacy_patch_config.json() assert res_adequacy_patch_config_json == { "enableAdequacyPatch": False, @@ -791,14 +769,13 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: client.put( f"/v1/studies/{study_id}/config/adequacypatch/form", - headers=admin_headers, json={ "ntcBetweenPhysicalAreasOutAdequacyPatch": False, "priceTakingOrder": "Load", "thresholdDisplayLocalMatchingRuleViolations": 1.1, }, ) - res_adequacy_patch_config = client.get(f"/v1/studies/{study_id}/config/adequacypatch/form", headers=admin_headers) + res_adequacy_patch_config = client.get(f"/v1/studies/{study_id}/config/adequacypatch/form") res_adequacy_patch_config_json = res_adequacy_patch_config.json() assert res_adequacy_patch_config_json == { "enableAdequacyPatch": False, @@ -814,7 +791,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: # General form - res_general_config = client.get(f"/v1/studies/{study_id}/config/general/form", headers=admin_headers) + res_general_config = client.get(f"/v1/studies/{study_id}/config/general/form") res_general_config_json = res_general_config.json() assert res_general_config_json == { "mode": "Economy", @@ -837,7 +814,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: client.put( f"/v1/studies/{study_id}/config/general/form", - headers=admin_headers, json={ "mode": Mode.ADEQUACY.value, "firstDay": 2, @@ -845,7 +821,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "leapYear": True, }, ) - res_general_config = client.get(f"/v1/studies/{study_id}/config/general/form", headers=admin_headers) + res_general_config = client.get(f"/v1/studies/{study_id}/config/general/form") res_general_config_json = res_general_config.json() assert res_general_config_json == { "mode": Mode.ADEQUACY.value, @@ -868,7 +844,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: # Thematic trimming form - res = client.get(f"/v1/studies/{study_id}/config/thematictrimming/form", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/config/thematictrimming/form") obj = res.json() assert obj == { "avlDtg": True, @@ -968,7 +944,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: client.put( f"/v1/studies/{study_id}/config/thematictrimming/form", - headers=admin_headers, json={ "ovCost": False, "opCost": True, @@ -1035,7 +1010,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "profitByPlant": True, }, ) - res = client.get(f"/v1/studies/{study_id}/config/thematictrimming/form", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/config/thematictrimming/form") obj = res.json() assert obj == { "avlDtg": True, @@ -1135,7 +1110,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: # Properties form - res_properties_config = client.get(f"/v1/studies/{study_id}/areas/area 1/properties/form", headers=admin_headers) + res_properties_config = client.get(f"/v1/studies/{study_id}/areas/area 1/properties/form") res_properties_config_json = res_properties_config.json() res_properties_config_json["filterSynthesis"] = set(res_properties_config_json["filterSynthesis"]) res_properties_config_json["filterByYear"] = set(res_properties_config_json["filterByYear"]) @@ -1152,7 +1127,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: client.put( f"/v1/studies/{study_id}/areas/area 
1/properties/form", - headers=admin_headers, json={ "energyCostUnsupplied": 2.0, "energyCostSpilled": 4.0, @@ -1164,7 +1138,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "adequacyPatchMode": "inside", }, ) - res_properties_config = client.get(f"/v1/studies/{study_id}/areas/area 1/properties/form", headers=admin_headers) + res_properties_config = client.get(f"/v1/studies/{study_id}/areas/area 1/properties/form") res_properties_config_json = res_properties_config.json() res_properties_config_json["filterSynthesis"] = set(res_properties_config_json["filterSynthesis"]) res_properties_config_json["filterByYear"] = set(res_properties_config_json["filterByYear"]) @@ -1183,7 +1157,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: res_hydro_config = client.put( f"/v1/studies/{study_id}/areas/area 1/hydro/form", - headers=admin_headers, json={ "interDailyBreakdown": 8, "intraDailyModulation": 7, @@ -1193,7 +1166,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: ) assert res_hydro_config.status_code == 200 - res_hydro_config = client.get(f"/v1/studies/{study_id}/areas/area 1/hydro/form", headers=admin_headers) + res_hydro_config = client.get(f"/v1/studies/{study_id}/areas/area 1/hydro/form") res_hydro_config_json = res_hydro_config.json() assert res_hydro_config_json == { @@ -1216,7 +1189,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: # Time-series form - res_ts_config = client.get(f"/v1/studies/{study_id}/config/timeseries/form", headers=admin_headers) + res_ts_config = client.get(f"/v1/studies/{study_id}/config/timeseries/form") res_ts_config_json = res_ts_config.json() assert res_ts_config_json == { "load": { @@ -1260,7 +1233,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: } res_ts_config = client.put( f"/v1/studies/{study_id}/config/timeseries/form", - headers=admin_headers, json={ "thermal": {"stochasticTsStatus": True}, "load": { @@ -1270,7 +1242,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: }, }, ) - res_ts_config = client.get(f"/v1/studies/{study_id}/config/timeseries/form", headers=admin_headers) + res_ts_config = client.get(f"/v1/studies/{study_id}/config/timeseries/form") res_ts_config_json = res_ts_config.json() assert res_ts_config_json == { "load": { @@ -1317,7 +1289,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: res = client.put( f"/v1/studies/{study_id}/areas/area 1/clusters/renewable/cluster renewable 1/form", - headers=admin_headers, json={ "name": "cluster renewable 1 renamed", "tsInterpretation": "production-factor", @@ -1330,7 +1301,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: res = client.get( f"/v1/studies/{study_id}/areas/area 1/clusters/renewable/cluster renewable 1/form", - headers=admin_headers, ) expected = { "enabled": False, @@ -1387,7 +1357,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: res = client.put( # This URL is deprecated, but we must check it for backward compatibility. f"/v1/studies/{study_id}/areas/area 1/clusters/thermal/cluster 1/form", - headers=admin_headers, json=obj, ) assert res.status_code == 200, res.json() @@ -1395,29 +1364,26 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: res = client.get( # This URL is deprecated, but we must check it for backward compatibility. 
f"/v1/studies/{study_id}/areas/area 1/clusters/thermal/cluster 1/form", - headers=admin_headers, ) assert res.status_code == 200, res.json() assert res.json() == {"id": "cluster 1", **obj} # Links - client.delete(f"/v1/studies/{study_id}/links/area%201/area%202", headers=admin_headers) - res_links = client.get(f"/v1/studies/{study_id}/links", headers=admin_headers) + client.delete(f"/v1/studies/{study_id}/links/area%201/area%202") + res_links = client.get(f"/v1/studies/{study_id}/links") assert res_links.json() == [] res = client.put( f"/v1/studies/{study_id}/areas/area%201/ui", - headers=admin_headers, json={"x": 100, "y": 100, "color_rgb": [255, 0, 100]}, ) res = client.put( f"/v1/studies/{study_id}/areas/area%202/ui?layer=1", - headers=admin_headers, json={"x": 105, "y": 105, "color_rgb": [255, 10, 100]}, ) assert res.status_code == 200 - res_ui = client.get(f"/v1/studies/{study_id}/areas?ui=true", headers=admin_headers) + res_ui = client.get(f"/v1/studies/{study_id}/areas?ui=true") assert res_ui.json() == { "area 1": { "ui": { @@ -1447,9 +1413,9 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: }, } - result = client.delete(f"/v1/studies/{study_id}/areas/area%201", headers=admin_headers) + result = client.delete(f"/v1/studies/{study_id}/areas/area%201") assert result.status_code == 200 - res_areas = client.get(f"/v1/studies/{study_id}/areas", headers=admin_headers) + res_areas = client.get(f"/v1/studies/{study_id}/areas") assert res_areas.json() == [ { "id": "area 2", @@ -1488,50 +1454,47 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: ] -def test_archive(client: TestClient, admin_access_token: str, study_id: str, tmp_path: Path) -> None: - admin_headers = {"Authorization": f"Bearer {admin_access_token}"} +def test_archive(client: TestClient, admin_access_token: str, tmp_path: Path) -> None: + client.headers = {"Authorization": f"Bearer {admin_access_token}"} - study_res = client.post("/v1/studies?name=foo", headers=admin_headers) + study_res = client.post("/v1/studies?name=foo") study_id = study_res.json() - res = client.put(f"/v1/studies/{study_id}/archive", headers=admin_headers) + res = client.put(f"/v1/studies/{study_id}/archive") assert res.status_code == 200 task_id = res.json() wait_for( lambda: client.get( f"/v1/tasks/{task_id}", - headers=admin_headers, ).json()["status"] == 3 ) - res = client.get(f"/v1/studies/{study_id}", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}") assert res.json()["archived"] assert (tmp_path / "archive_dir" / f"{study_id}.zip").exists() - res = client.put(f"/v1/studies/{study_id}/unarchive", headers=admin_headers) + res = client.put(f"/v1/studies/{study_id}/unarchive") task_id = res.json() wait_for( lambda: client.get( f"/v1/tasks/{task_id}", - headers=admin_headers, ).json()["status"] == 3 ) - res = client.get(f"/v1/studies/{study_id}", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}") assert not res.json()["archived"] assert not (tmp_path / "archive_dir" / f"{study_id}.zip").exists() -def test_maintenance(client: TestClient, admin_access_token: str, study_id: str) -> None: - admin_headers = {"Authorization": f"Bearer {admin_access_token}"} +def test_maintenance(client: TestClient, admin_access_token: str) -> None: + client.headers = {"Authorization": f"Bearer {admin_access_token}"} # Create non admin user res = client.post( "/v1/users", - headers=admin_headers, json={"name": "user", "password": "user"}, ) assert res.status_code == 200 @@ 
-1544,13 +1507,11 @@ def set_maintenance(value: bool) -> None: # Set maintenance mode result = client.post( f"/v1/core/maintenance?maintenance={'true' if value else 'false'}", - headers=admin_headers, ) assert result.status_code == 200 result = client.get( "/v1/core/maintenance", - headers=admin_headers, ) assert result.status_code == 200 assert result.json() == value @@ -1569,19 +1530,18 @@ def set_maintenance(value: bool) -> None: message = "Hey" res = client.post( "/v1/core/maintenance/message", - headers=admin_headers, json=message, ) assert res.status_code == 200 # Set message info when not admin - res = client.get("/v1/core/maintenance/message", headers=admin_headers) + res = client.get("/v1/core/maintenance/message") assert res.status_code == 200 assert res.json() == message def test_import(client: TestClient, admin_access_token: str, study_id: str) -> None: - admin_headers = {"Authorization": f"Bearer {admin_access_token}"} + client.headers = {"Authorization": f"Bearer {admin_access_token}"} zip_path = ASSETS_DIR / "STA-mini.zip" seven_zip_path = ASSETS_DIR / "STA-mini.7z" @@ -1590,16 +1550,14 @@ def test_import(client: TestClient, admin_access_token: str, study_id: str) -> N uuid = client.post( "/v1/studies/_import", files={"study": io.BytesIO(zip_path.read_bytes())}, - headers=admin_headers, ).json() - res = client.get(f"v1/studies/{uuid}", headers=admin_headers).json() + res = client.get(f"v1/studies/{uuid}").json() assert res["groups"] == [{"id": "admin", "name": "admin"}] assert res["public_mode"] == "NONE" # Create user George who belongs to no group client.post( "/v1/users", - headers=admin_headers, json={"name": "George", "password": "mypass"}, ) res = client.post("/v1/login", json={"username": "George", "password": "mypass"}) @@ -1616,11 +1574,42 @@ def test_import(client: TestClient, admin_access_token: str, study_id: str) -> N assert res["groups"] == [] assert res["public_mode"] == "READ" + # create George group + george_group = "george_group" + res = client.post( + "/v1/groups", + json={"id": george_group, "name": george_group}, + ) + assert res.status_code in {200, 201} + # add George to the group as a reader + client.post( + "/v1/roles", + json={"type": 10, "group_id": george_group, "identity_id": 2}, + ) + # reset login to update credentials + res = client.post( + "/v1/refresh", + headers={"Authorization": f'Bearer {george_credentials["refresh_token"]}'}, + ) + george_credentials = res.json() + + # George imports a study, and it should succeed even if he has only "READER" access in the group + georges_headers = {"Authorization": f'Bearer {george_credentials["access_token"]}'} + res = client.post( + "/v1/studies/_import", + files={"study": io.BytesIO(zip_path.read_bytes())}, + headers=georges_headers, + ) + assert res.status_code in {200, 201} + uuid = res.json() + res = client.get(f"v1/studies/{uuid}", headers=georges_headers).json() + assert res["groups"] == [{"id": george_group, "name": george_group}] + assert res["public_mode"] == "NONE" + # Study importer works for 7z files res = client.post( "/v1/studies/_import", files={"study": io.BytesIO(seven_zip_path.read_bytes())}, - headers=admin_headers, ) assert res.status_code == 201 @@ -1674,13 +1663,13 @@ def test_import(client: TestClient, admin_access_token: str, study_id: str) -> N def test_copy(client: TestClient, admin_access_token: str, study_id: str) -> None: - admin_headers = {"Authorization": f"Bearer {admin_access_token}"} + client.headers = {"Authorization": f"Bearer {admin_access_token}"} # Copy a study 
with admin user who belongs to a group - copied = client.post(f"/v1/studies/{study_id}/copy?dest=copied&use_task=false", headers=admin_headers) + copied = client.post(f"/v1/studies/{study_id}/copy?dest=copied&use_task=false") assert copied.status_code == 201 # asserts that it has admin groups and PublicMode to NONE - res = client.get(f"/v1/studies/{copied.json()}", headers=admin_headers).json() + res = client.get(f"/v1/studies/{copied.json()}").json() assert res["groups"] == [{"id": "admin", "name": "admin"}] assert res["public_mode"] == "NONE" @@ -1695,6 +1684,6 @@ def test_copy(client: TestClient, admin_access_token: str, study_id: str) -> Non ) assert copied.status_code == 201 # asserts that it has no groups and PublicMode to READ - res = client.get(f"/v1/studies/{copied.json()}", headers=admin_headers).json() + res = client.get(f"/v1/studies/{copied.json()}").json() assert res["groups"] == [] assert res["public_mode"] == "READ"
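
For illustration only (a usage sketch, not part of any patch above; the field name, the minLength value of 3, and the existingValues variable are assumptions): after PATCH 146/147 the renamed string-length options are passed to validateString inside react-hook-form rules, and a failing check resolves the new form.field.minLength / form.field.maxLength translation keys with a {{length}} parameter.

    // Hypothetical form field; only maxLength: 20 and specialChars: "-" appear in the diffs above.
    <StringFE
      name="name"
      control={control}
      rules={{
        validate: (v) =>
          validateString(v, {
            minLength: 3,            // assumed lower bound for the example
            maxLength: 20,           // as used for binding constraint groups
            specialChars: "-",
            existingValues: existingConstraints, // assumed list of names to reject as duplicates
          }),
      }}
    />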