From b6a0398fd5a10fed2ab0f3973c914f75e7fe5a67 Mon Sep 17 00:00:00 2001
From: Samir Kamal <1954121+skamril@users.noreply.github.com>
Date: Fri, 19 Apr 2024 17:51:00 +0200
Subject: [PATCH 01/16] build: prepare hotfix release v2.16.8 (2024-04-19)
---
antarest/__init__.py | 4 ++--
docs/CHANGELOG.md | 4 ++++
setup.py | 2 +-
sonar-project.properties | 2 +-
webapp/package-lock.json | 4 ++--
webapp/package.json | 2 +-
6 files changed, 11 insertions(+), 7 deletions(-)
diff --git a/antarest/__init__.py b/antarest/__init__.py
index 29054100d1..f4fae4cd35 100644
--- a/antarest/__init__.py
+++ b/antarest/__init__.py
@@ -7,9 +7,9 @@
# Standard project metadata
-__version__ = "2.16.7"
+__version__ = "2.16.8"
__author__ = "RTE, Antares Web Team"
-__date__ = "2024-03-05"
+__date__ = "2024-04-19"
# noinspection SpellCheckingInspection
__credits__ = "(c) Réseau de Transport de l’Électricité (RTE)"
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index fc42446272..5207a43aa4 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -1,6 +1,10 @@
Antares Web Changelog
=====================
+v2.16.8 (2024-04-19)
+--------------------
+
+
v2.16.7 (2024-03-05)
--------------------
diff --git a/setup.py b/setup.py
index c3ec3c3060..ce3b1690ea 100644
--- a/setup.py
+++ b/setup.py
@@ -6,7 +6,7 @@
setup(
name="AntaREST",
- version="2.16.7",
+ version="2.16.8",
description="Antares Server",
long_description=Path("README.md").read_text(encoding="utf-8"),
long_description_content_type="text/markdown",
diff --git a/sonar-project.properties b/sonar-project.properties
index 972bef9399..ed0c9da1c1 100644
--- a/sonar-project.properties
+++ b/sonar-project.properties
@@ -6,5 +6,5 @@ sonar.exclusions=antarest/gui.py,antarest/main.py
sonar.python.coverage.reportPaths=coverage.xml
sonar.python.version=3.8
sonar.javascript.lcov.reportPaths=webapp/coverage/lcov.info
-sonar.projectVersion=2.16.7
+sonar.projectVersion=2.16.8
sonar.coverage.exclusions=antarest/gui.py,antarest/main.py,antarest/singleton_services.py,antarest/worker/archive_worker_service.py,webapp/**/*
\ No newline at end of file
diff --git a/webapp/package-lock.json b/webapp/package-lock.json
index 2c81f56aa3..58ecdf282c 100644
--- a/webapp/package-lock.json
+++ b/webapp/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "antares-web",
- "version": "2.16.7",
+ "version": "2.16.8",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "antares-web",
- "version": "2.16.7",
+ "version": "2.16.8",
"dependencies": {
"@emotion/react": "11.11.1",
"@emotion/styled": "11.11.0",
diff --git a/webapp/package.json b/webapp/package.json
index e21bd5dd7e..7c9677bb5c 100644
--- a/webapp/package.json
+++ b/webapp/package.json
@@ -1,6 +1,6 @@
{
"name": "antares-web",
- "version": "2.16.7",
+ "version": "2.16.8",
"private": true,
"type": "module",
"scripts": {
From dcd600da812911f137b4dfa0c378349d91558391 Mon Sep 17 00:00:00 2001
From: Laurent LAPORTE <43534797+laurent-laporte-pro@users.noreply.github.com>
Date: Wed, 13 Mar 2024 10:35:16 +0100
Subject: [PATCH 02/16] fix(st-storages-ui): correction of incorrect wording
between "withdrawal" and "injection" (#1977)
(cherry picked from commit b1cd0d2b83be4c475546aa66c367fe14fd8d7e76)
---
docs/CHANGELOG.md | 5 +++++
webapp/public/locales/en/main.json | 12 ++++++------
webapp/public/locales/fr/main.json | 14 +++++++-------
.../explore/Modelization/Areas/Storages/Matrix.tsx | 4 ++--
4 files changed, 20 insertions(+), 15 deletions(-)
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 5207a43aa4..80cb204daa 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -4,6 +4,11 @@ Antares Web Changelog
v2.16.8 (2024-04-19)
--------------------
+### Bug Fixes
+
+* **st-storages (ui):** correction of incorrect wording between "withdrawal" and "injection" [`#1977`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1977)
+* **st-storages (ui):** change matrix titles [`#1994`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1994)
+
v2.16.7 (2024-03-05)
--------------------
diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json
index b088d7e0cd..3c82f3881a 100644
--- a/webapp/public/locales/en/main.json
+++ b/webapp/public/locales/en/main.json
@@ -413,14 +413,14 @@
"study.modelization.storages.capacities": "Injection / withdrawal capacities",
"study.modelization.storages.ruleCurves": "Rule Curves",
"study.modelization.storages.inflows": "Inflows",
- "study.modelization.storages.chargeCapacity": "Withdrawal capacity",
- "study.modelization.storages.dischargeCapacity": "Injection capacity",
+ "study.modelization.storages.injectionCapacity": "Injection capacity",
+ "study.modelization.storages.withdrawalCapacity": "Withdrawal capacity",
"study.modelization.storages.lowerRuleCurve": "Lower rule curve",
"study.modelization.storages.upperRuleCurve": "Upper rule curve",
- "study.modelization.storages.injectionNominalCapacity": "Withdrawal (MW)",
- "study.modelization.storages.injectionNominalCapacity.info": "Withdrawal capacity from the network (MW)",
- "study.modelization.storages.withdrawalNominalCapacity": "Injection (MW)",
- "study.modelization.storages.withdrawalNominalCapacity.info": "Injection capacity from stock to the network (MW)",
+ "study.modelization.storages.injectionNominalCapacity": "Injection (MW)",
+ "study.modelization.storages.injectionNominalCapacity.info": "Injection capacity from stock to the network (MW)",
+ "study.modelization.storages.withdrawalNominalCapacity": "Withdrawal (MW)",
+ "study.modelization.storages.withdrawalNominalCapacity.info": "Withdrawal capacity from the network (MW)",
"study.modelization.storages.reservoirCapacity": "Stock (MWh)",
"study.modelization.storages.reservoirCapacity.info": "Stock (MWh)",
"study.modelization.storages.efficiency": "Efficiency (%)",
diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json
index 0abb2c9dbc..67d1a51d11 100644
--- a/webapp/public/locales/fr/main.json
+++ b/webapp/public/locales/fr/main.json
@@ -410,17 +410,17 @@
"study.modelization.hydro.allocation.viewMatrix": "Voir les allocations",
"study.modelization.hydro.allocation.error.field.delete": "Erreur lors de la suppression de l'allocation",
"study.modelization.storages": "Stockages",
- "study.modelization.storages.capacities": "Capacités d'injection / soutirage",
+ "study.modelization.storages.capacities": "Capacités d’injection / soutirage",
"study.modelization.storages.ruleCurves": "Courbe guides",
"study.modelization.storages.inflows": "Apports",
- "study.modelization.storages.chargeCapacity": "Capacité de soutirage",
- "study.modelization.storages.dischargeCapacity": "Capacité d'injection",
+ "study.modelization.storages.injectionCapacity": "Capacité d’injection",
+ "study.modelization.storages.withdrawalCapacity": "Capacité de soutirage",
"study.modelization.storages.lowerRuleCurve": "Courbe guide inférieure",
"study.modelization.storages.upperRuleCurve": "Courbe guide supérieure",
- "study.modelization.storages.injectionNominalCapacity": "Soutirage (MW)",
- "study.modelization.storages.injectionNominalCapacity.info": "Capacité de soutirage du stock depuis le réseau (MW)",
- "study.modelization.storages.withdrawalNominalCapacity": "Injection (MW)",
- "study.modelization.storages.withdrawalNominalCapacity.info": "Capacité d'injection du stock vers le réseau (MW)",
+ "study.modelization.storages.injectionNominalCapacity": "Injection (MW)",
+ "study.modelization.storages.injectionNominalCapacity.info": "Capacité d’injection dans le stock depuis le réseau (MW)",
+ "study.modelization.storages.withdrawalNominalCapacity": "Soutirage (MW)",
+ "study.modelization.storages.withdrawalNominalCapacity.info": "Capacité de soutirage du stock vers le réseau (MW)",
"study.modelization.storages.reservoirCapacity": "Stock (MWh)",
"study.modelization.storages.reservoirCapacity.info": "Stock (MWh)",
"study.modelization.storages.efficiency": "Efficacité (%)",
diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Matrix.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Matrix.tsx
index bb31f7390c..7bc86cd47a 100644
--- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Matrix.tsx
+++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Matrix.tsx
@@ -56,7 +56,7 @@ function Matrix({ study, areaId, storageId }: Props) {
study={study}
url={`input/st-storage/series/${areaId}/${storageId}/pmax_injection`}
computStats={MatrixStats.NOCOL}
- title={t("study.modelization.storages.chargeCapacity")}
+ title={t("study.modelization.storages.injectionCapacity")}
/>
}
right={
@@ -64,7 +64,7 @@ function Matrix({ study, areaId, storageId }: Props) {
study={study}
url={`input/st-storage/series/${areaId}/${storageId}/pmax_withdrawal`}
computStats={MatrixStats.NOCOL}
- title={t("study.modelization.storages.dischargeCapacity")}
+ title={t("study.modelization.storages.withdrawalCapacity")}
/>
}
sx={{
From 7eadeaf567edac1e85db879b601bfe3027189fa4 Mon Sep 17 00:00:00 2001
From: Samir Kamal <1954121+skamril@users.noreply.github.com>
Date: Tue, 2 Apr 2024 14:02:07 +0200
Subject: [PATCH 03/16] fix(st-storages-ui): change matrix titles (#1994)
(cherry picked from commit 7004692ff3011462427c173e1d145e9afb085e72)
---
webapp/public/locales/en/main.json | 6 +++---
webapp/public/locales/fr/main.json | 6 +++---
.../explore/Modelization/Areas/Storages/Matrix.tsx | 8 +++++---
3 files changed, 11 insertions(+), 9 deletions(-)
diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json
index 3c82f3881a..b64b0d86bb 100644
--- a/webapp/public/locales/en/main.json
+++ b/webapp/public/locales/en/main.json
@@ -410,11 +410,11 @@
"study.modelization.hydro.allocation.viewMatrix": "View all allocations",
"study.modelization.hydro.allocation.error.field.delete": "Error when deleting the allocation",
"study.modelization.storages": "Storages",
- "study.modelization.storages.capacities": "Injection / withdrawal capacities",
+ "study.modelization.storages.modulation": "Injection / Withdrawal Modulation",
"study.modelization.storages.ruleCurves": "Rule Curves",
"study.modelization.storages.inflows": "Inflows",
- "study.modelization.storages.injectionCapacity": "Injection capacity",
- "study.modelization.storages.withdrawalCapacity": "Withdrawal capacity",
+ "study.modelization.storages.injectionModulation": "Injection Modulation",
+ "study.modelization.storages.withdrawalModulation": "Withdrawal Modulation",
"study.modelization.storages.lowerRuleCurve": "Lower rule curve",
"study.modelization.storages.upperRuleCurve": "Upper rule curve",
"study.modelization.storages.injectionNominalCapacity": "Injection (MW)",
diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json
index 67d1a51d11..cf8577422e 100644
--- a/webapp/public/locales/fr/main.json
+++ b/webapp/public/locales/fr/main.json
@@ -410,11 +410,11 @@
"study.modelization.hydro.allocation.viewMatrix": "Voir les allocations",
"study.modelization.hydro.allocation.error.field.delete": "Erreur lors de la suppression de l'allocation",
"study.modelization.storages": "Stockages",
- "study.modelization.storages.capacities": "Capacités d’injection / soutirage",
+ "study.modelization.storages.modulation": "Modulation de l’injection / soutirage",
"study.modelization.storages.ruleCurves": "Courbe guides",
"study.modelization.storages.inflows": "Apports",
- "study.modelization.storages.injectionCapacity": "Capacité d’injection",
- "study.modelization.storages.withdrawalCapacity": "Capacité de soutirage",
+ "study.modelization.storages.injectionModulation": "Modulation de l’injection",
+ "study.modelization.storages.withdrawalModulation": "Modulation du soutirage",
"study.modelization.storages.lowerRuleCurve": "Courbe guide inférieure",
"study.modelization.storages.upperRuleCurve": "Courbe guide supérieure",
"study.modelization.storages.injectionNominalCapacity": "Injection (MW)",
diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Matrix.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Matrix.tsx
index 7bc86cd47a..63ff535498 100644
--- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Matrix.tsx
+++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Matrix.tsx
@@ -35,7 +35,7 @@ function Matrix({ study, areaId, storageId }: Props) {
}}
>
setValue(v)}>
-
+
@@ -56,7 +56,7 @@ function Matrix({ study, areaId, storageId }: Props) {
study={study}
url={`input/st-storage/series/${areaId}/${storageId}/pmax_injection`}
computStats={MatrixStats.NOCOL}
- title={t("study.modelization.storages.injectionCapacity")}
+ title={t("study.modelization.storages.injectionModulation")}
/>
}
right={
@@ -64,7 +64,9 @@ function Matrix({ study, areaId, storageId }: Props) {
study={study}
url={`input/st-storage/series/${areaId}/${storageId}/pmax_withdrawal`}
computStats={MatrixStats.NOCOL}
- title={t("study.modelization.storages.withdrawalCapacity")}
+ title={t(
+ "study.modelization.storages.withdrawalModulation",
+ )}
/>
}
sx={{
From 2c52f37c7134e64270cc0caec9cb8d0b0b6dfb28 Mon Sep 17 00:00:00 2001
From: MartinBelthle <102529366+martinbelthle@users.noreply.github.com>
Date: Sat, 9 Mar 2024 14:14:40 +0100
Subject: [PATCH 04/16] feat(clusters): add new endpoint for clusters
duplication (#1972)
(cherry picked from commit fcdb1c9f2aab897cea293a2a8716ec356989f150)
---
antarest/core/exceptions.py | 10 +
.../business/areas/renewable_management.py | 82 +++++--
.../business/areas/st_storage_management.py | 89 +++++--
.../business/areas/thermal_management.py | 93 +++++--
antarest/study/web/study_data_blueprint.py | 50 ++++
docs/CHANGELOG.md | 5 +-
.../study_data_blueprint/test_renewable.py | 232 +++++++++++++++++-
.../study_data_blueprint/test_st_storage.py | 209 +++++++++++++++-
.../study_data_blueprint/test_thermal.py | 228 ++++++++++++++++-
9 files changed, 927 insertions(+), 71 deletions(-)
diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py
index 9a2230c1d1..4d6c1a2f5f 100644
--- a/antarest/core/exceptions.py
+++ b/antarest/core/exceptions.py
@@ -304,3 +304,13 @@ def __init__(self, area_id: str) -> None:
HTTPStatus.NOT_FOUND,
f"Cluster configuration for area: '{area_id}' not found",
)
+
+
+class ClusterAlreadyExists(HTTPException):
+ """Exception raised when attempting to create a cluster with an already existing ID."""
+
+ def __init__(self, cluster_type: str, cluster_id: str) -> None:
+ super().__init__(
+ HTTPStatus.CONFLICT,
+ f"{cluster_type} cluster with ID '{cluster_id}' already exists and could not be created.",
+ )
diff --git a/antarest/study/business/areas/renewable_management.py b/antarest/study/business/areas/renewable_management.py
index ab9a2e9802..c4152924bf 100644
--- a/antarest/study/business/areas/renewable_management.py
+++ b/antarest/study/business/areas/renewable_management.py
@@ -3,10 +3,11 @@
from pydantic import validator
-from antarest.core.exceptions import ClusterConfigNotFound, ClusterNotFound
+from antarest.core.exceptions import ClusterAlreadyExists, ClusterConfigNotFound, ClusterNotFound
from antarest.study.business.enum_ignore_case import EnumIgnoreCase
from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands
from antarest.study.model import Study
+from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id
from antarest.study.storage.rawstudy.model.filesystem.config.renewable import (
RenewableConfig,
RenewableConfigType,
@@ -17,6 +18,7 @@
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.variantstudy.model.command.create_renewables_cluster import CreateRenewablesCluster
from antarest.study.storage.variantstudy.model.command.remove_renewables_cluster import RemoveRenewablesCluster
+from antarest.study.storage.variantstudy.model.command.replace_matrix import ReplaceMatrix
from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig
__all__ = (
@@ -47,7 +49,7 @@ class Config:
def schema_extra(schema: t.MutableMapping[str, t.Any]) -> None:
schema["example"] = RenewableClusterInput(
group="Gas",
- name="2 avail and must 1",
+ name="Gas Cluster XY",
enabled=False,
unitCount=100,
nominalCapacity=1000.0,
@@ -85,9 +87,9 @@ class Config:
@staticmethod
def schema_extra(schema: t.MutableMapping[str, t.Any]) -> None:
schema["example"] = RenewableClusterOutput(
- id="2 avail and must 1",
+ id="Gas cluster YZ",
group="Gas",
- name="2 avail and must 1",
+ name="Gas Cluster YZ",
enabled=False,
unitCount=100,
nominalCapacity=1000.0,
@@ -157,23 +159,25 @@ def create_cluster(
The newly created cluster.
"""
file_study = self._get_file_study(study)
- study_version = study.version
- cluster = cluster_data.to_config(study_version)
-
- command = CreateRenewablesCluster(
- area_id=area_id,
- cluster_name=cluster.id,
- parameters=cluster.dict(by_alias=True, exclude={"id"}),
- command_context=self.storage_service.variant_study_service.command_factory.command_context,
- )
+ cluster = cluster_data.to_config(study.version)
+ command = self._make_create_cluster_cmd(area_id, cluster)
execute_or_add_commands(
study,
file_study,
[command],
self.storage_service,
)
+ output = self.get_cluster(study, area_id, cluster.id)
+ return output
- return self.get_cluster(study, area_id, cluster.id)
+ def _make_create_cluster_cmd(self, area_id: str, cluster: RenewableConfigType) -> CreateRenewablesCluster:
+ command = CreateRenewablesCluster(
+ area_id=area_id,
+ cluster_name=cluster.id,
+ parameters=cluster.dict(by_alias=True, exclude={"id"}),
+ command_context=self.storage_service.variant_study_service.command_factory.command_context,
+ )
+ return command
def get_cluster(self, study: Study, area_id: str, cluster_id: str) -> RenewableClusterOutput:
"""
@@ -273,3 +277,53 @@ def delete_clusters(self, study: Study, area_id: str, cluster_ids: t.Sequence[st
]
execute_or_add_commands(study, file_study, commands, self.storage_service)
+
+ def duplicate_cluster(
+ self,
+ study: Study,
+ area_id: str,
+ source_id: str,
+ new_cluster_name: str,
+ ) -> RenewableClusterOutput:
+ """
+ Creates a duplicate cluster within the study area with a new name.
+
+ Args:
+ study: The study in which the cluster will be duplicated.
+ area_id: The identifier of the area where the cluster will be duplicated.
+ source_id: The identifier of the cluster to be duplicated.
+ new_cluster_name: The new name for the duplicated cluster.
+
+ Returns:
+ The duplicated cluster configuration.
+
+ Raises:
+ ClusterAlreadyExists: If a cluster with the new name already exists in the area.
+ """
+ new_id = transform_name_to_id(new_cluster_name, lower=False)
+ lower_new_id = new_id.lower()
+ if any(lower_new_id == cluster.id.lower() for cluster in self.get_clusters(study, area_id)):
+ raise ClusterAlreadyExists("Renewable", new_id)
+
+ # Cluster duplication
+ current_cluster = self.get_cluster(study, area_id, source_id)
+ current_cluster.name = new_cluster_name
+ creation_form = RenewableClusterCreation(**current_cluster.dict(by_alias=False, exclude={"id"}))
+ new_config = creation_form.to_config(study.version)
+ create_cluster_cmd = self._make_create_cluster_cmd(area_id, new_config)
+
+ # Matrix edition
+ lower_source_id = source_id.lower()
+ source_path = f"input/renewables/series/{area_id}/{lower_source_id}/series"
+ new_path = f"input/renewables/series/{area_id}/{lower_new_id}/series"
+
+ # Prepare and execute commands
+ storage_service = self.storage_service.get_storage(study)
+ command_context = self.storage_service.variant_study_service.command_factory.command_context
+ current_matrix = storage_service.get(study, source_path)["data"]
+ replace_matrix_cmd = ReplaceMatrix(target=new_path, matrix=current_matrix, command_context=command_context)
+ commands = [create_cluster_cmd, replace_matrix_cmd]
+
+ execute_or_add_commands(study, self._get_file_study(study), commands, self.storage_service)
+
+ return RenewableClusterOutput(**new_config.dict(by_alias=False))
diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py
index d18dce9f9c..ca498c030a 100644
--- a/antarest/study/business/areas/st_storage_management.py
+++ b/antarest/study/business/areas/st_storage_management.py
@@ -8,12 +8,14 @@
from typing_extensions import Literal
from antarest.core.exceptions import (
+ ClusterAlreadyExists,
STStorageConfigNotFoundError,
STStorageFieldsNotFoundError,
STStorageMatrixNotFoundError,
)
from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands
from antarest.study.model import Study
+from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id
from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import (
STStorageConfig,
STStorageGroup,
@@ -24,6 +26,7 @@
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.variantstudy.model.command.create_st_storage import CreateSTStorage
from antarest.study.storage.variantstudy.model.command.remove_st_storage import RemoveSTStorage
+from antarest.study.storage.variantstudy.model.command.replace_matrix import ReplaceMatrix
from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig
__all__ = (
@@ -72,8 +75,8 @@ def validate_name(cls, name: t.Optional[str]) -> str:
raise ValueError("'name' must not be empty")
return name
- @property
- def to_config(self) -> STStorageConfig:
+ # noinspection PyUnusedLocal
+ def to_config(self, study_version: t.Union[str, int]) -> STStorageConfig:
values = self.dict(by_alias=False, exclude_none=True)
return STStorageConfig(**values)
@@ -203,7 +206,7 @@ def validate_rule_curve(
upper_array = np.array(upper_rule_curve.data, dtype=np.float64)
# noinspection PyUnresolvedReferences
if (lower_array > upper_array).any():
- raise ValueError("Each 'lower_rule_curve' value must be lower" " or equal to each 'upper_rule_curve'")
+ raise ValueError("Each 'lower_rule_curve' value must be lower or equal to each 'upper_rule_curve'")
return values
@@ -257,21 +260,25 @@ def create_storage(
Returns:
The ID of the newly created short-term storage.
"""
- storage = form.to_config
- command = CreateSTStorage(
- area_id=area_id,
- parameters=storage,
- command_context=self.storage_service.variant_study_service.command_factory.command_context,
- )
file_study = self._get_file_study(study)
+ storage = form.to_config(study.version)
+ command = self._make_create_cluster_cmd(area_id, storage)
execute_or_add_commands(
study,
file_study,
[command],
self.storage_service,
)
+ output = self.get_storage(study, area_id, storage_id=storage.id)
+ return output
- return self.get_storage(study, area_id, storage_id=storage.id)
+ def _make_create_cluster_cmd(self, area_id: str, cluster: STStorageConfig) -> CreateSTStorage:
+ command = CreateSTStorage(
+ area_id=area_id,
+ parameters=cluster,
+ command_context=self.storage_service.variant_study_service.command_factory.command_context,
+ )
+ return command
def get_storages(
self,
@@ -418,6 +425,59 @@ def delete_storages(
file_study = self._get_file_study(study)
execute_or_add_commands(study, file_study, [command], self.storage_service)
+ def duplicate_cluster(self, study: Study, area_id: str, source_id: str, new_cluster_name: str) -> STStorageOutput:
+ """
+ Creates a duplicate cluster within the study area with a new name.
+
+ Args:
+ study: The study in which the cluster will be duplicated.
+ area_id: The identifier of the area where the cluster will be duplicated.
+ source_id: The identifier of the cluster to be duplicated.
+ new_cluster_name: The new name for the duplicated cluster.
+
+ Returns:
+ The duplicated cluster configuration.
+
+ Raises:
+ ClusterAlreadyExists: If a cluster with the new name already exists in the area.
+ """
+ new_id = transform_name_to_id(new_cluster_name)
+ lower_new_id = new_id.lower()
+ if any(lower_new_id == storage.id.lower() for storage in self.get_storages(study, area_id)):
+ raise ClusterAlreadyExists("Short-term storage", new_id)
+
+ # Cluster duplication
+ current_cluster = self.get_storage(study, area_id, source_id)
+ current_cluster.name = new_cluster_name
+ creation_form = STStorageCreation(**current_cluster.dict(by_alias=False, exclude={"id"}))
+ new_config = creation_form.to_config(study.version)
+ create_cluster_cmd = self._make_create_cluster_cmd(area_id, new_config)
+
+ # Matrix edition
+ lower_source_id = source_id.lower()
+ ts_names = ["pmax_injection", "pmax_withdrawal", "lower_rule_curve", "upper_rule_curve", "inflows"]
+ source_paths = [
+ STORAGE_SERIES_PATH.format(area_id=area_id, storage_id=lower_source_id, ts_name=ts_name)
+ for ts_name in ts_names
+ ]
+ new_paths = [
+ STORAGE_SERIES_PATH.format(area_id=area_id, storage_id=lower_new_id, ts_name=ts_name)
+ for ts_name in ts_names
+ ]
+
+ # Prepare and execute commands
+ commands: t.List[t.Union[CreateSTStorage, ReplaceMatrix]] = [create_cluster_cmd]
+ storage_service = self.storage_service.get_storage(study)
+ command_context = self.storage_service.variant_study_service.command_factory.command_context
+ for source_path, new_path in zip(source_paths, new_paths):
+ current_matrix = storage_service.get(study, source_path)["data"]
+ command = ReplaceMatrix(target=new_path, matrix=current_matrix, command_context=command_context)
+ commands.append(command)
+
+ execute_or_add_commands(study, self._get_file_study(study), commands, self.storage_service)
+
+ return STStorageOutput(**new_config.dict(by_alias=False))
+
def get_matrix(
self,
study: Study,
@@ -484,12 +544,11 @@ def _save_matrix_obj(
ts_name: STStorageTimeSeries,
matrix_obj: t.Dict[str, t.Any],
) -> None:
- file_study = self._get_file_study(study)
path = STORAGE_SERIES_PATH.format(area_id=area_id, storage_id=storage_id, ts_name=ts_name)
- try:
- file_study.tree.save(matrix_obj, path.split("/"))
- except KeyError:
- raise STStorageMatrixNotFoundError(study.id, area_id, storage_id, ts_name) from None
+ matrix = matrix_obj["data"]
+ command_context = self.storage_service.variant_study_service.command_factory.command_context
+ command = ReplaceMatrix(target=path, matrix=matrix, command_context=command_context)
+ execute_or_add_commands(study, self._get_file_study(study), [command], self.storage_service)
def validate_matrices(
self,
diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py
index dfcc52a2a0..f44ad7ba10 100644
--- a/antarest/study/business/areas/thermal_management.py
+++ b/antarest/study/business/areas/thermal_management.py
@@ -3,9 +3,10 @@
from pydantic import validator
-from antarest.core.exceptions import ClusterConfigNotFound, ClusterNotFound
+from antarest.core.exceptions import ClusterAlreadyExists, ClusterConfigNotFound, ClusterNotFound
from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands
from antarest.study.model import Study
+from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id
from antarest.study.storage.rawstudy.model.filesystem.config.thermal import (
Thermal860Config,
Thermal860Properties,
@@ -16,6 +17,7 @@
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.variantstudy.model.command.create_cluster import CreateCluster
from antarest.study.storage.variantstudy.model.command.remove_cluster import RemoveCluster
+from antarest.study.storage.variantstudy.model.command.replace_matrix import ReplaceMatrix
from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig
__all__ = (
@@ -40,7 +42,7 @@ class Config:
def schema_extra(schema: t.MutableMapping[str, t.Any]) -> None:
schema["example"] = ThermalClusterInput(
group="Gas",
- name="2 avail and must 1",
+ name="Gas Cluster XY",
enabled=False,
unitCount=100,
nominalCapacity=1000.0,
@@ -79,9 +81,9 @@ class Config:
@staticmethod
def schema_extra(schema: t.MutableMapping[str, t.Any]) -> None:
schema["example"] = ThermalClusterOutput(
- id="2 avail and must 1",
+ id="Gas cluster YZ",
group="Gas",
- name="2 avail and must 1",
+ name="Gas Cluster YZ",
enabled=False,
unitCount=100,
nominalCapacity=1000.0,
@@ -190,16 +192,8 @@ def create_cluster(self, study: Study, area_id: str, cluster_data: ThermalCluste
"""
file_study = self._get_file_study(study)
- study_version = study.version
- cluster = cluster_data.to_config(study_version)
- # NOTE: currently, in the `CreateCluster` class, there is a confusion
- # between the cluster name and the cluster ID (which is a section name).
- command = CreateCluster(
- area_id=area_id,
- cluster_name=cluster.id,
- parameters=cluster.dict(by_alias=True, exclude={"id"}),
- command_context=self.storage_service.variant_study_service.command_factory.command_context,
- )
+ cluster = cluster_data.to_config(study.version)
+ command = self._make_create_cluster_cmd(area_id, cluster)
execute_or_add_commands(
study,
file_study,
@@ -209,6 +203,17 @@ def create_cluster(self, study: Study, area_id: str, cluster_data: ThermalCluste
output = self.get_cluster(study, area_id, cluster.id)
return output
+ def _make_create_cluster_cmd(self, area_id: str, cluster: ThermalConfigType) -> CreateCluster:
+ # NOTE: currently, in the `CreateCluster` class, there is a confusion
+ # between the cluster name and the cluster ID (which is a section name).
+ command = CreateCluster(
+ area_id=area_id,
+ cluster_name=cluster.id,
+ parameters=cluster.dict(by_alias=True, exclude={"id"}),
+ command_context=self.storage_service.variant_study_service.command_factory.command_context,
+ )
+ return command
+
def update_cluster(
self,
study: Study,
@@ -286,3 +291,63 @@ def delete_clusters(self, study: Study, area_id: str, cluster_ids: t.Sequence[st
]
execute_or_add_commands(study, file_study, commands, self.storage_service)
+
+ def duplicate_cluster(
+ self,
+ study: Study,
+ area_id: str,
+ source_id: str,
+ new_cluster_name: str,
+ ) -> ThermalClusterOutput:
+ """
+ Creates a duplicate cluster within the study area with a new name.
+
+ Args:
+ study: The study in which the cluster will be duplicated.
+ area_id: The identifier of the area where the cluster will be duplicated.
+ source_id: The identifier of the cluster to be duplicated.
+ new_cluster_name: The new name for the duplicated cluster.
+
+ Returns:
+ The duplicated cluster configuration.
+
+ Raises:
+ ClusterAlreadyExists: If a cluster with the new name already exists in the area.
+ """
+ new_id = transform_name_to_id(new_cluster_name, lower=False)
+ lower_new_id = new_id.lower()
+ if any(lower_new_id == cluster.id.lower() for cluster in self.get_clusters(study, area_id)):
+ raise ClusterAlreadyExists("Thermal", new_id)
+
+ # Cluster duplication
+ source_cluster = self.get_cluster(study, area_id, source_id)
+ source_cluster.name = new_cluster_name
+ creation_form = ThermalClusterCreation(**source_cluster.dict(by_alias=False, exclude={"id"}))
+ new_config = creation_form.to_config(study.version)
+ create_cluster_cmd = self._make_create_cluster_cmd(area_id, new_config)
+
+ # Matrix edition
+ lower_source_id = source_id.lower()
+ source_paths = [
+ f"input/thermal/series/{area_id}/{lower_source_id}/series",
+ f"input/thermal/prepro/{area_id}/{lower_source_id}/modulation",
+ f"input/thermal/prepro/{area_id}/{lower_source_id}/data",
+ ]
+ new_paths = [
+ f"input/thermal/series/{area_id}/{lower_new_id}/series",
+ f"input/thermal/prepro/{area_id}/{lower_new_id}/modulation",
+ f"input/thermal/prepro/{area_id}/{lower_new_id}/data",
+ ]
+
+ # Prepare and execute commands
+ commands: t.List[t.Union[CreateCluster, ReplaceMatrix]] = [create_cluster_cmd]
+ storage_service = self.storage_service.get_storage(study)
+ command_context = self.storage_service.variant_study_service.command_factory.command_context
+ for source_path, new_path in zip(source_paths, new_paths):
+ current_matrix = storage_service.get(study, source_path)["data"]
+ command = ReplaceMatrix(target=new_path, matrix=current_matrix, command_context=command_context)
+ commands.append(command)
+
+ execute_or_add_commands(study, self._get_file_study(study), commands, self.storage_service)
+
+ return ThermalClusterOutput(**new_config.dict(by_alias=False))
diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py
index c7e6ac17fd..bc667f45d5 100644
--- a/antarest/study/web/study_data_blueprint.py
+++ b/antarest/study/web/study_data_blueprint.py
@@ -1,3 +1,4 @@
+import enum
import logging
from http import HTTPStatus
from typing import Any, Dict, List, Optional, Sequence, Union, cast
@@ -24,10 +25,12 @@
RenewableClusterCreation,
RenewableClusterInput,
RenewableClusterOutput,
+ RenewableManager,
)
from antarest.study.business.areas.st_storage_management import (
STStorageCreation,
STStorageInput,
+ STStorageManager,
STStorageMatrix,
STStorageOutput,
STStorageTimeSeries,
@@ -36,6 +39,7 @@
ThermalClusterCreation,
ThermalClusterInput,
ThermalClusterOutput,
+ ThermalManager,
)
from antarest.study.business.binding_constraint_management import (
BindingConstraintPropertiesWithName,
@@ -58,6 +62,20 @@
logger = logging.getLogger(__name__)
+class ClusterType(str, enum.Enum):
+ """
+ Cluster type:
+
+ - `ST_STORAGES`: short-term storages
+ - `RENEWABLES`: renewable clusters
+ - `THERMALS`: thermal clusters
+ """
+
+ ST_STORAGES = "storages"
+ RENEWABLES = "renewables"
+ THERMALS = "thermals"
+
+
def create_study_data_routes(study_service: StudyService, config: Config) -> APIRouter:
"""
Endpoint implementation for studies area management
@@ -2019,4 +2037,36 @@ def delete_st_storages(
study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, params)
study_service.st_storage_manager.delete_storages(study, area_id, storage_ids)
+ @bp.post(
+ path="/studies/{uuid}/areas/{area_id}/{cluster_type}/{source_cluster_id}",
+ tags=[APITag.study_data],
+ summary="Duplicates a given cluster",
+ )
+ def duplicate_cluster(
+ uuid: str,
+ area_id: str,
+ cluster_type: ClusterType,
+ source_cluster_id: str,
+ new_cluster_name: str = Query(..., alias="newName", title="New Cluster Name"), # type: ignore
+ current_user: JWTUser = Depends(auth.get_current_user),
+ ) -> Union[STStorageOutput, ThermalClusterOutput, RenewableClusterOutput]:
+ logger.info(
+ f"Duplicates {cluster_type.value} {source_cluster_id} of {area_id} for study {uuid}",
+ extra={"user": current_user.id},
+ )
+ params = RequestParameters(user=current_user)
+ study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, params)
+
+ manager: Union[STStorageManager, RenewableManager, ThermalManager]
+ if cluster_type == ClusterType.ST_STORAGES:
+ manager = STStorageManager(study_service.storage_service)
+ elif cluster_type == ClusterType.RENEWABLES:
+ manager = RenewableManager(study_service.storage_service)
+ elif cluster_type == ClusterType.THERMALS:
+ manager = ThermalManager(study_service.storage_service)
+ else: # pragma: no cover
+ raise NotImplementedError(f"Cluster type {cluster_type} not implemented")
+
+ return manager.duplicate_cluster(study, area_id, source_cluster_id, new_cluster_name)
+
return bp
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 80cb204daa..41a9028209 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -4,12 +4,15 @@ Antares Web Changelog
v2.16.8 (2024-04-19)
--------------------
+### Features
+
+* **clusters:** add new endpoint for clusters duplication [`#1972`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1972)
+
### Bug Fixes
* **st-storages (ui):** correction of incorrect wording between "withdrawal" and "injection" [`#1977`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1977)
* **st-storages (ui):** change matrix titles [`#1994`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1994)
-
v2.16.7 (2024-03-05)
--------------------
diff --git a/tests/integration/study_data_blueprint/test_renewable.py b/tests/integration/study_data_blueprint/test_renewable.py
index 14f1f4388a..8447c0430f 100644
--- a/tests/integration/study_data_blueprint/test_renewable.py
+++ b/tests/integration/study_data_blueprint/test_renewable.py
@@ -25,7 +25,9 @@
"""
import json
import re
+import typing as t
+import numpy as np
import pytest
from starlette.testclient import TestClient
@@ -132,7 +134,23 @@ def test_lifecycle(
# RENEWABLE CLUSTER MATRICES
# =============================
- # TODO: add unit tests for renewable cluster matrices
+ matrix = np.random.randint(0, 2, size=(8760, 1)).tolist()
+ matrix_path = f"input/renewables/series/{area_id}/{fr_solar_pv_id.lower()}/series"
+ args = {"target": matrix_path, "matrix": matrix}
+ res = client.post(
+ f"/v1/studies/{study_id}/commands",
+ json=[{"action": "replace_matrix", "args": args}],
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code in {200, 201}, res.json()
+
+ res = client.get(
+ f"/v1/studies/{study_id}/raw",
+ params={"path": matrix_path},
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200
+ assert res.json()["data"] == matrix
# ==================================
# RENEWABLE CLUSTER LIST / GROUPS
@@ -211,6 +229,34 @@ def test_lifecycle(
assert res.status_code == 200, res.json()
assert res.json() == fr_solar_pv_cfg
+ # ===============================
+ # RENEWABLE CLUSTER DUPLICATION
+ # ===============================
+
+ new_name = "Duplicate of SolarPV"
+ res = client.post(
+ f"/v1/studies/{study_id}/areas/{area_id}/renewables/{fr_solar_pv_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"newName": new_name},
+ )
+ # asserts the config is the same
+ assert res.status_code in {200, 201}, res.json()
+ duplicated_config = dict(fr_solar_pv_cfg)
+ duplicated_config["name"] = new_name
+ duplicated_id = transform_name_to_id(new_name, lower=False)
+ duplicated_config["id"] = duplicated_id
+ assert res.json() == duplicated_config
+
+ # asserts the matrix has also been duplicated
+ new_cluster_matrix_path = f"input/renewables/series/{area_id}/{duplicated_id.lower()}/series"
+ res = client.get(
+ f"/v1/studies/{study_id}/raw",
+ params={"path": new_cluster_matrix_path},
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200
+ assert res.json()["data"] == matrix
+
# =============================
# RENEWABLE CLUSTER DELETION
# =============================
@@ -237,10 +283,11 @@ def test_lifecycle(
# It's possible to delete multiple renewable clusters at once.
# Create two clusters
+ other_cluster_name = "Other Cluster 1"
res = client.post(
f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable",
headers={"Authorization": f"Bearer {user_access_token}"},
- json={"name": "Other Cluster 1"},
+ json={"name": other_cluster_name},
)
assert res.status_code == 200, res.json()
other_cluster_id1 = res.json()["id"]
@@ -253,28 +300,24 @@ def test_lifecycle(
assert res.status_code == 200, res.json()
other_cluster_id2 = res.json()["id"]
- # We can delete the two renewable clusters at once.
+ # We can delete two renewable clusters at once.
res = client.request(
"DELETE",
f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable",
headers={"Authorization": f"Bearer {user_access_token}"},
- json=[other_cluster_id1, other_cluster_id2],
+ json=[other_cluster_id2, duplicated_id],
)
assert res.status_code == 204, res.json()
assert res.text in {"", "null"} # Old FastAPI versions return 'null'.
- # The list of renewable clusters should be empty.
+ # There should only be one remaining cluster
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable",
headers={"Authorization": f"Bearer {user_access_token}"},
)
- assert res.status_code == 200, res.json()
- expected = [
- c
- for c in EXISTING_CLUSTERS
- if transform_name_to_id(c["name"], lower=False) not in [other_cluster_id1, other_cluster_id2]
- ]
- assert res.json() == expected
+ assert res.status_code == 200
+ obj = res.json()
+ assert len(obj) == 1
# ===========================
# RENEWABLE CLUSTER ERRORS
@@ -422,3 +465,168 @@ def test_lifecycle(
obj = res.json()
description = obj["description"]
assert bad_study_id in description
+
+ # Cannot duplicate a fake cluster
+ unknown_id = "unknown"
+ res = client.post(
+ f"/v1/studies/{study_id}/areas/{area_id}/renewables/{unknown_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"newName": "duplicata"},
+ )
+ assert res.status_code == 404
+ obj = res.json()
+ assert obj["description"] == f"Cluster: '{unknown_id}' not found"
+ assert obj["exception"] == "ClusterNotFound"
+
+ # Cannot duplicate with an existing id
+ res = client.post(
+ f"/v1/studies/{study_id}/areas/{area_id}/renewables/{other_cluster_id1}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"newName": other_cluster_name.upper()}, # different case, but same ID
+ )
+ assert res.status_code == 409, res.json()
+ obj = res.json()
+ description = obj["description"]
+ assert other_cluster_name.upper() in description
+ assert obj["exception"] == "ClusterAlreadyExists"
+
+ @pytest.fixture(name="base_study_id")
+ def base_study_id_fixture(self, request: t.Any, client: TestClient, user_access_token: str) -> str:
+ """Prepare a managed study for the variant study tests."""
+ params = request.param
+ res = client.post(
+ "/v1/studies",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params=params,
+ )
+ assert res.status_code in {200, 201}, res.json()
+ study_id: str = res.json()
+ return study_id
+
+ @pytest.fixture(name="variant_id")
+ def variant_id_fixture(self, request: t.Any, client: TestClient, user_access_token: str, base_study_id: str) -> str:
+ """Prepare a variant study for the variant study tests."""
+ name = request.param
+ res = client.post(
+ f"/v1/studies/{base_study_id}/variants",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"name": name},
+ )
+ assert res.status_code in {200, 201}, res.json()
+ study_id: str = res.json()
+ return study_id
+
+ # noinspection PyTestParametrized
+ @pytest.mark.parametrize("base_study_id", [{"name": "Base Study", "version": 860}], indirect=True)
+ @pytest.mark.parametrize("variant_id", ["Variant Study"], indirect=True)
+ def test_variant_lifecycle(self, client: TestClient, user_access_token: str, variant_id: str) -> None:
+ """
+ In this test, we want to check that renewable clusters can be managed
+ in the context of a "variant" study.
+ """
+ # Create an area
+ area_name = "France"
+ res = client.post(
+ f"/v1/studies/{variant_id}/areas",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json={"name": area_name, "type": "AREA"},
+ )
+ assert res.status_code in {200, 201}, res.json()
+ area_cfg = res.json()
+ area_id = area_cfg["id"]
+
+ # Create a renewable cluster
+ cluster_name = "Th1"
+ res = client.post(
+ f"/v1/studies/{variant_id}/areas/{area_id}/clusters/renewable",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json={
+ "name": cluster_name,
+ "group": "Wind Offshore",
+ "unitCount": 13,
+ "nominalCapacity": 42500,
+ },
+ )
+ assert res.status_code in {200, 201}, res.json()
+ cluster_id: str = res.json()["id"]
+
+ # Update the renewable cluster
+ res = client.patch(
+ f"/v1/studies/{variant_id}/areas/{area_id}/clusters/renewable/{cluster_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json={"unitCount": 15},
+ )
+ assert res.status_code == 200, res.json()
+ cluster_cfg = res.json()
+ assert cluster_cfg["unitCount"] == 15
+
+ # Update the series matrix
+ matrix = np.random.randint(0, 2, size=(8760, 1)).tolist()
+ matrix_path = f"input/renewables/series/{area_id}/{cluster_id.lower()}/series"
+ args = {"target": matrix_path, "matrix": matrix}
+ res = client.post(
+ f"/v1/studies/{variant_id}/commands",
+ json=[{"action": "replace_matrix", "args": args}],
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code in {200, 201}, res.json()
+
+ # Duplicate the renewable cluster
+ new_name = "Th2"
+ res = client.post(
+ f"/v1/studies/{variant_id}/areas/{area_id}/renewables/{cluster_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"newName": new_name},
+ )
+ assert res.status_code in {200, 201}, res.json()
+ cluster_cfg = res.json()
+ assert cluster_cfg["name"] == new_name
+ new_id = cluster_cfg["id"]
+
+ # Check that the duplicate has the right properties
+ res = client.get(
+ f"/v1/studies/{variant_id}/areas/{area_id}/clusters/renewable/{new_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200, res.json()
+ cluster_cfg = res.json()
+ assert cluster_cfg["group"] == "Wind Offshore"
+ assert cluster_cfg["unitCount"] == 15
+ assert cluster_cfg["nominalCapacity"] == 42500
+
+ # Check that the duplicate has the right matrix
+ new_cluster_matrix_path = f"input/renewables/series/{area_id}/{new_id.lower()}/series"
+ res = client.get(
+ f"/v1/studies/{variant_id}/raw",
+ params={"path": new_cluster_matrix_path},
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200
+ assert res.json()["data"] == matrix
+
+ # Delete the renewable cluster
+ res = client.delete(
+ f"/v1/studies/{variant_id}/areas/{area_id}/clusters/renewable",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json=[cluster_id],
+ )
+ assert res.status_code == 204, res.json()
+
+ # Check the list of variant commands
+ res = client.get(
+ f"/v1/studies/{variant_id}/commands",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200, res.json()
+ commands = res.json()
+ assert len(commands) == 7
+ actions = [command["action"] for command in commands]
+ assert actions == [
+ "create_area",
+ "create_renewables_cluster",
+ "update_config",
+ "replace_matrix",
+ "create_renewables_cluster",
+ "replace_matrix",
+ "remove_renewables_cluster",
+ ]
diff --git a/tests/integration/study_data_blueprint/test_st_storage.py b/tests/integration/study_data_blueprint/test_st_storage.py
index fdffe5efe1..5f2421d911 100644
--- a/tests/integration/study_data_blueprint/test_st_storage.py
+++ b/tests/integration/study_data_blueprint/test_st_storage.py
@@ -1,5 +1,6 @@
import json
import re
+import typing as t
from unittest.mock import ANY
import numpy as np
@@ -123,14 +124,15 @@ def test_lifecycle__nominal(
# =============================
# updating the matrix of a short-term storage
- array = np.random.rand(8760, 1) * 1000
+ array = np.random.randint(0, 1000, size=(8760, 1))
+ array_list = array.tolist()
res = client.put(
f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}/series/inflows",
headers={"Authorization": f"Bearer {user_access_token}"},
json={
"index": list(range(array.shape[0])),
"columns": list(range(array.shape[1])),
- "data": array.tolist(),
+ "data": array_list,
},
)
assert res.status_code == 200, res.json()
@@ -231,6 +233,32 @@ def test_lifecycle__nominal(
assert res.status_code == 200, res.json()
assert res.json() == siemens_config
+ # =============================
+ # SHORT-TERM STORAGE DUPLICATION
+ # =============================
+
+ new_name = "Duplicate of Siemens"
+ res = client.post(
+ f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"newName": new_name},
+ )
+ assert res.status_code in {200, 201}, res.json()
+ # asserts the config is the same
+ duplicated_config = dict(siemens_config)
+ duplicated_config["name"] = new_name # type: ignore
+ duplicated_id = transform_name_to_id(new_name)
+ duplicated_config["id"] = duplicated_id # type: ignore
+ assert res.json() == duplicated_config
+
+ # asserts the matrix has also been duplicated
+ res = client.get(
+ f"/v1/studies/{study_id}/areas/{area_id}/storages/{duplicated_id}/series/inflows",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200
+ assert res.json()["data"] == array_list
+
# =============================
# SHORT-TERM STORAGE DELETION
# =============================
@@ -303,25 +331,25 @@ def test_lifecycle__nominal(
assert res.status_code == 200, res.json()
siemens_config = {**DEFAULT_PROPERTIES, **siemens_properties, "id": siemens_battery_id}
grand_maison_config = {**DEFAULT_PROPERTIES, **grand_maison_properties, "id": grand_maison_id}
- assert res.json() == [siemens_config, grand_maison_config]
+ assert res.json() == [duplicated_config, siemens_config, grand_maison_config]
- # We can delete the two short-term storages at once.
+ # We can delete two of the three short-term storages at once.
res = client.request(
"DELETE",
f"/v1/studies/{study_id}/areas/{area_id}/storages",
headers={"Authorization": f"Bearer {user_access_token}"},
- json=[siemens_battery_id, grand_maison_id],
+ json=[grand_maison_id, duplicated_config["id"]],
)
assert res.status_code == 204, res.json()
assert res.text in {"", "null"} # Old FastAPI versions return 'null'.
- # The list of short-term storages should be empty.
+ # Only one st-storage should remain.
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/storages",
headers={"Authorization": f"Bearer {user_access_token}"},
)
assert res.status_code == 200, res.json()
- assert res.json() == []
+ assert len(res.json()) == 1
# ===========================
# SHORT-TERM STORAGE ERRORS
@@ -450,6 +478,30 @@ def test_lifecycle__nominal(
description = obj["description"]
assert bad_study_id in description
+ # Cannot duplicate a fake st-storage
+ unknown_id = "unknown"
+ res = client.post(
+ f"/v1/studies/{study_id}/areas/{area_id}/storages/{unknown_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"newName": "duplicata"},
+ )
+ assert res.status_code == 404, res.json()
+ obj = res.json()
+ assert obj["description"] == f"Fields of storage '{unknown_id}' not found"
+ assert obj["exception"] == "STStorageFieldsNotFoundError"
+
+ # Cannot duplicate with an existing id
+ res = client.post(
+ f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"newName": siemens_battery.upper()}, # different case, but same ID
+ )
+ assert res.status_code == 409, res.json()
+ obj = res.json()
+ description = obj["description"]
+ assert siemens_battery.lower() in description
+ assert obj["exception"] == "ClusterAlreadyExists"
+
def test__default_values(
self,
client: TestClient,
@@ -632,3 +684,146 @@ def test__default_values(
"initiallevel": 0.0,
}
assert actual == expected
+
+ @pytest.fixture(name="base_study_id")
+ def base_study_id_fixture(self, request: t.Any, client: TestClient, user_access_token: str) -> str:
+ """Prepare a managed study for the variant study tests."""
+ params = request.param
+ res = client.post(
+ "/v1/studies",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params=params,
+ )
+ assert res.status_code in {200, 201}, res.json()
+ study_id: str = res.json()
+ return study_id
+
+ @pytest.fixture(name="variant_id")
+ def variant_id_fixture(self, request: t.Any, client: TestClient, user_access_token: str, base_study_id: str) -> str:
+ """Prepare a variant study for the variant study tests."""
+ name = request.param
+ res = client.post(
+ f"/v1/studies/{base_study_id}/variants",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"name": name},
+ )
+ assert res.status_code in {200, 201}, res.json()
+ study_id: str = res.json()
+ return study_id
+
+ # noinspection PyTestParametrized
+ @pytest.mark.parametrize("base_study_id", [{"name": "Base Study", "version": 860}], indirect=True)
+ @pytest.mark.parametrize("variant_id", ["Variant Study"], indirect=True)
+ def test_variant_lifecycle(self, client: TestClient, user_access_token: str, variant_id: str) -> None:
+ """
+ In this test, we want to check that short-term storages can be managed
+ in the context of a "variant" study.
+ """
+ # Create an area
+ area_name = "France"
+ res = client.post(
+ f"/v1/studies/{variant_id}/areas",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json={"name": area_name, "type": "AREA"},
+ )
+ assert res.status_code in {200, 201}, res.json()
+ area_cfg = res.json()
+ area_id = area_cfg["id"]
+
+ # Create a short-term storage
+ cluster_name = "Tesla1"
+ res = client.post(
+ f"/v1/studies/{variant_id}/areas/{area_id}/storages",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json={
+ "name": cluster_name,
+ "group": "Battery",
+ "injectionNominalCapacity": 4500,
+ "withdrawalNominalCapacity": 4230,
+ "reservoirCapacity": 5700,
+ },
+ )
+ assert res.status_code in {200, 201}, res.json()
+ cluster_id: str = res.json()["id"]
+
+ # Update the short-term storage
+ res = client.patch(
+ f"/v1/studies/{variant_id}/areas/{area_id}/storages/{cluster_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json={"reservoirCapacity": 5600},
+ )
+ assert res.status_code == 200, res.json()
+ cluster_cfg = res.json()
+ assert cluster_cfg["reservoirCapacity"] == 5600
+
+ # Update the series matrix
+ matrix = np.random.randint(0, 2, size=(8760, 1)).tolist()
+ matrix_path = f"input/st-storage/series/{area_id}/{cluster_id.lower()}/pmax_injection"
+ args = {"target": matrix_path, "matrix": matrix}
+ res = client.post(
+ f"/v1/studies/{variant_id}/commands",
+ json=[{"action": "replace_matrix", "args": args}],
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code in {200, 201}, res.json()
+
+ # Duplicate the short-term storage
+ new_name = "Tesla2"
+ res = client.post(
+ f"/v1/studies/{variant_id}/areas/{area_id}/storages/{cluster_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"newName": new_name},
+ )
+ assert res.status_code in {200, 201}, res.json()
+ cluster_cfg = res.json()
+ assert cluster_cfg["name"] == new_name
+ new_id = cluster_cfg["id"]
+
+ # Check that the duplicate has the right properties
+ res = client.get(
+ f"/v1/studies/{variant_id}/areas/{area_id}/storages/{new_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200, res.json()
+ cluster_cfg = res.json()
+ assert cluster_cfg["group"] == "Battery"
+ assert cluster_cfg["injectionNominalCapacity"] == 4500
+ assert cluster_cfg["withdrawalNominalCapacity"] == 4230
+ assert cluster_cfg["reservoirCapacity"] == 5600
+
+ # Check that the duplicate has the right matrix
+ new_cluster_matrix_path = f"input/st-storage/series/{area_id}/{new_id.lower()}/pmax_injection"
+ res = client.get(
+ f"/v1/studies/{variant_id}/raw",
+ params={"path": new_cluster_matrix_path},
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200
+ assert res.json()["data"] == matrix
+
+ # Delete the short-term storage
+ res = client.delete(
+ f"/v1/studies/{variant_id}/areas/{area_id}/storages",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json=[cluster_id],
+ )
+ assert res.status_code == 204, res.json()
+
+ # Check the list of variant commands
+ res = client.get(
+ f"/v1/studies/{variant_id}/commands",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200, res.json()
+ commands = res.json()
+ assert len(commands) == 7
+ actions = [command["action"] for command in commands]
+ assert actions == [
+ "create_area",
+ "create_st_storage",
+ "update_config",
+ "replace_matrix",
+ "create_st_storage",
+ "replace_matrix",
+ "remove_st_storage",
+ ]
diff --git a/tests/integration/study_data_blueprint/test_thermal.py b/tests/integration/study_data_blueprint/test_thermal.py
index 1890d44acf..9fc7388642 100644
--- a/tests/integration/study_data_blueprint/test_thermal.py
+++ b/tests/integration/study_data_blueprint/test_thermal.py
@@ -29,7 +29,9 @@
"""
import json
import re
+import typing as t
+import numpy as np
import pytest
from starlette.testclient import TestClient
@@ -455,7 +457,23 @@ def test_lifecycle(
# THERMAL CLUSTER MATRICES
# =============================
- # TODO: add unit tests for thermal cluster matrices
+ matrix = np.random.randint(0, 2, size=(8760, 1)).tolist()
+ matrix_path = f"input/thermal/prepro/{area_id}/{fr_gas_conventional_id.lower()}/data"
+ args = {"target": matrix_path, "matrix": matrix}
+ res = client.post(
+ f"/v1/studies/{study_id}/commands",
+ json=[{"action": "replace_matrix", "args": args}],
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code in {200, 201}, res.json()
+
+ res = client.get(
+ f"/v1/studies/{study_id}/raw",
+ params={"path": matrix_path},
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200
+ assert res.json()["data"] == matrix
# ==================================
# THERMAL CLUSTER LIST / GROUPS
@@ -536,6 +554,34 @@ def test_lifecycle(
assert res.status_code == 200, res.json()
assert res.json() == fr_gas_conventional_cfg
+ # =============================
+ # THERMAL CLUSTER DUPLICATION
+ # =============================
+
+ new_name = "Duplicate of Fr_Gas_Conventional"
+ res = client.post(
+ f"/v1/studies/{study_id}/areas/{area_id}/thermals/{fr_gas_conventional_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"newName": new_name},
+ )
+ assert res.status_code in {200, 201}, res.json()
+ # asserts the config is the same
+ duplicated_config = dict(fr_gas_conventional_cfg)
+ duplicated_config["name"] = new_name
+ duplicated_id = transform_name_to_id(new_name, lower=False)
+ duplicated_config["id"] = duplicated_id
+ assert res.json() == duplicated_config
+
+ # asserts the matrix has also been duplicated
+ new_cluster_matrix_path = f"input/thermal/prepro/{area_id}/{duplicated_id.lower()}/data"
+ res = client.get(
+ f"/v1/studies/{study_id}/raw",
+ params={"path": new_cluster_matrix_path},
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200
+ assert res.json()["data"] == matrix
+
# =============================
# THERMAL CLUSTER DELETION
# =============================
@@ -573,18 +619,15 @@ def test_lifecycle(
assert res.status_code == 204, res.json()
assert res.text in {"", "null"} # Old FastAPI versions return 'null'.
- # The list of thermal clusters should be empty.
+ # The list of thermal clusters should not contain the deleted ones.
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal",
headers={"Authorization": f"Bearer {user_access_token}"},
)
assert res.status_code == 200, res.json()
- expected = [
- c
- for c in EXISTING_CLUSTERS
- if transform_name_to_id(c["name"], lower=False) not in [other_cluster_id1, other_cluster_id2]
- ]
- assert res.json() == expected
+ deleted_clusters = [other_cluster_id1, other_cluster_id2, fr_gas_conventional_id]
+ for cluster in res.json():
+ assert transform_name_to_id(cluster["name"], lower=False) not in deleted_clusters
# ===========================
# THERMAL CLUSTER ERRORS
@@ -748,3 +791,172 @@ def test_lifecycle(
obj = res.json()
description = obj["description"]
assert bad_study_id in description
+
+ # Cannot duplicate a fake cluster
+ unknown_id = "unknown"
+ res = client.post(
+ f"/v1/studies/{study_id}/areas/{area_id}/thermals/{unknown_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"newName": "duplicate"},
+ )
+ assert res.status_code == 404, res.json()
+ obj = res.json()
+ assert obj["description"] == f"Cluster: '{unknown_id}' not found"
+ assert obj["exception"] == "ClusterNotFound"
+
+ # Cannot duplicate with an existing id
+ res = client.post(
+ f"/v1/studies/{study_id}/areas/{area_id}/thermals/{duplicated_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"newName": new_name.upper()}, # different case but same ID
+ )
+ assert res.status_code == 409, res.json()
+ obj = res.json()
+ description = obj["description"]
+ assert new_name.upper() in description
+ assert obj["exception"] == "ClusterAlreadyExists"
+
+ @pytest.fixture(name="base_study_id")
+ def base_study_id_fixture(self, request: t.Any, client: TestClient, user_access_token: str) -> str:
+ """Prepare a managed study for the variant study tests."""
+ params = request.param
+ res = client.post(
+ "/v1/studies",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params=params,
+ )
+ assert res.status_code in {200, 201}, res.json()
+ study_id: str = res.json()
+ return study_id
+
+ @pytest.fixture(name="variant_id")
+ def variant_id_fixture(self, request: t.Any, client: TestClient, user_access_token: str, base_study_id: str) -> str:
+ """Prepare a variant study for the variant study tests."""
+ name = request.param
+ res = client.post(
+ f"/v1/studies/{base_study_id}/variants",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"name": name},
+ )
+ assert res.status_code in {200, 201}, res.json()
+ study_id: str = res.json()
+ return study_id
+
+ # noinspection PyTestParametrized
+ @pytest.mark.parametrize("base_study_id", [{"name": "Base Study", "version": 860}], indirect=True)
+ @pytest.mark.parametrize("variant_id", ["Variant Study"], indirect=True)
+ def test_variant_lifecycle(self, client: TestClient, user_access_token: str, variant_id: str) -> None:
+ """
+ In this test, we want to check that thermal clusters can be managed
+ in the context of a "variant" study.
+ """
+ # Create an area
+ area_name = "France"
+ res = client.post(
+ f"/v1/studies/{variant_id}/areas",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json={"name": area_name, "type": "AREA"},
+ )
+ assert res.status_code in {200, 201}, res.json()
+ area_cfg = res.json()
+ area_id = area_cfg["id"]
+
+ # Create a thermal cluster
+ cluster_name = "Th1"
+ res = client.post(
+ f"/v1/studies/{variant_id}/areas/{area_id}/clusters/thermal",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json={
+ "name": cluster_name,
+ "group": "Nuclear",
+ "unitCount": 13,
+ "nominalCapacity": 42500,
+ "marginalCost": 0.1,
+ },
+ )
+ assert res.status_code in {200, 201}, res.json()
+ cluster_id: str = res.json()["id"]
+
+ # Update the thermal cluster
+ res = client.patch(
+ f"/v1/studies/{variant_id}/areas/{area_id}/clusters/thermal/{cluster_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json={
+ "marginalCost": 0.2,
+ },
+ )
+ assert res.status_code == 200, res.json()
+ cluster_cfg = res.json()
+ assert cluster_cfg["marginalCost"] == 0.2
+
+ # Update the prepro matrix
+ matrix = np.random.randint(0, 2, size=(8760, 1)).tolist()
+ matrix_path = f"input/thermal/prepro/{area_id}/{cluster_id.lower()}/data"
+ args = {"target": matrix_path, "matrix": matrix}
+ res = client.post(
+ f"/v1/studies/{variant_id}/commands",
+ json=[{"action": "replace_matrix", "args": args}],
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code in {200, 201}, res.json()
+
+ # Duplicate the thermal cluster
+ new_name = "Th2"
+ res = client.post(
+ f"/v1/studies/{variant_id}/areas/{area_id}/thermals/{cluster_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"newName": new_name},
+ )
+ assert res.status_code in {200, 201}, res.json()
+ cluster_cfg = res.json()
+ assert cluster_cfg["name"] == new_name
+ new_id = cluster_cfg["id"]
+
+ # Check that the duplicate has the right properties
+ res = client.get(
+ f"/v1/studies/{variant_id}/areas/{area_id}/clusters/thermal/{new_id}",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200, res.json()
+ cluster_cfg = res.json()
+ assert cluster_cfg["group"] == "Nuclear"
+ assert cluster_cfg["unitCount"] == 13
+ assert cluster_cfg["nominalCapacity"] == 42500
+ assert cluster_cfg["marginalCost"] == 0.2
+
+ # Check that the duplicate has the right matrix
+ new_cluster_matrix_path = f"input/thermal/prepro/{area_id}/{new_id.lower()}/data"
+ res = client.get(
+ f"/v1/studies/{variant_id}/raw",
+ params={"path": new_cluster_matrix_path},
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200
+ assert res.json()["data"] == matrix
+
+ # Delete the thermal cluster
+ res = client.delete(
+ f"/v1/studies/{variant_id}/areas/{area_id}/clusters/thermal",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ json=[cluster_id],
+ )
+ assert res.status_code == 204, res.json()
+
+ # Check the list of variant commands
+ res = client.get(
+ f"/v1/studies/{variant_id}/commands",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ )
+ assert res.status_code == 200, res.json()
+ commands = res.json()
+ assert len(commands) == 7
+ actions = [command["action"] for command in commands]
+ assert actions == [
+ "create_area",
+ "create_cluster",
+ "update_config",
+ "replace_matrix",
+ "create_cluster",
+ "replace_matrix",
+ "remove_cluster",
+ ]
From 61d9fc868e61aa2ea8468e0d163a6e7b7b47d1dc Mon Sep 17 00:00:00 2001
From: MartinBelthle <102529366+martinbelthle@users.noreply.github.com>
Date: Sat, 9 Mar 2024 15:49:42 +0100
Subject: [PATCH 05/16] fix(st-storages): use command when updating matrices
(#1971)
Resolves [ANT-1352]
(cherry picked from commit b3f654a658a2309c80512ab294ad724875f4c126)
---
antarest/core/exceptions.py | 24 ++-
.../business/areas/st_storage_management.py | 63 +++++--
docs/CHANGELOG.md | 1 +
.../study_data_blueprint/test_st_storage.py | 169 ++++++++++--------
.../areas/test_st_storage_management.py | 27 ++-
5 files changed, 188 insertions(+), 96 deletions(-)
diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py
index 4d6c1a2f5f..1755d001df 100644
--- a/antarest/core/exceptions.py
+++ b/antarest/core/exceptions.py
@@ -34,13 +34,35 @@ class STStorageConfigNotFoundError(HTTPException):
"""Configuration for short-term storage is not found"""
def __init__(self, study_id: str, area_id: str) -> None:
- detail = f"The short-term storage configuration of area '{area_id}' not found:"
+ detail = f"The short-term storage configuration of area '{area_id}' not found"
super().__init__(HTTPStatus.NOT_FOUND, detail)
def __str__(self) -> str:
return self.detail
+class STStorageNotFoundError(HTTPException):
+ """Short-term storage is not found"""
+
+ def __init__(self, study_id: str, area_id: str, st_storage_id: str) -> None:
+ detail = f"Short-term storage '{st_storage_id}' not found in area '{area_id}'"
+ super().__init__(HTTPStatus.NOT_FOUND, detail)
+
+ def __str__(self) -> str:
+ return self.detail
+
+
+class DuplicateSTStorageId(HTTPException):
+ """Exception raised when trying to create a short-term storage with an already existing id."""
+
+ def __init__(self, study_id: str, area_id: str, st_storage_id: str) -> None:
+ detail = f"Short term storage '{st_storage_id}' already exists in area '{area_id}'"
+ super().__init__(HTTPStatus.CONFLICT, detail)
+
+ def __str__(self) -> str:
+ return self.detail
+
+
class UnknownModuleError(Exception):
def __init__(self, message: str) -> None:
super(UnknownModuleError, self).__init__(message)
diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py
index ca498c030a..7109d8c668 100644
--- a/antarest/study/business/areas/st_storage_management.py
+++ b/antarest/study/business/areas/st_storage_management.py
@@ -8,10 +8,13 @@
from typing_extensions import Literal
from antarest.core.exceptions import (
+ AreaNotFound,
ClusterAlreadyExists,
+ DuplicateSTStorageId,
STStorageConfigNotFoundError,
STStorageFieldsNotFoundError,
STStorageMatrixNotFoundError,
+ STStorageNotFoundError,
)
from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands
from antarest.study.model import Study
@@ -262,6 +265,7 @@ def create_storage(
"""
file_study = self._get_file_study(study)
storage = form.to_config(study.version)
+ _check_creation_feasibility(file_study, area_id, storage.id)
command = self._make_create_cluster_cmd(area_id, storage)
execute_or_add_commands(
study,
@@ -357,18 +361,11 @@ def update_storage(
"""
study_version = study.version
- # review: reading the configuration poses a problem for variants,
- # because it requires generating a snapshot, which takes time.
- # This reading could be avoided if we don't need the previous values
- # (no cross-field validation, no default values, etc.).
- # In return, we won't be able to return a complete `STStorageOutput` object.
- # So, we need to make sure the frontend doesn't need the missing fields.
- # This missing information could also be a problem for the API users.
- # The solution would be to avoid reading the configuration if the study is a variant
- # (we then use the default values), otherwise, for a RAW study, we read the configuration
- # and update the modified values.
+ # For variants, this method requires generating a snapshot, which takes time.
+ # But sadly, there's no other way to prevent creating wrong commands.
file_study = self._get_file_study(study)
+ _check_update_feasibility(file_study, area_id, storage_id)
path = STORAGE_LIST_PATH.format(area_id=area_id, storage_id=storage_id)
try:
@@ -415,6 +412,9 @@ def delete_storages(
area_id: The area ID of the short-term storage.
storage_ids: IDs list of short-term storages to remove.
"""
+ file_study = self._get_file_study(study)
+ _check_deletion_feasibility(file_study, area_id, storage_ids)
+
command_context = self.storage_service.variant_study_service.command_factory.command_context
for storage_id in storage_ids:
command = RemoveSTStorage(
@@ -422,7 +422,6 @@ def delete_storages(
storage_id=storage_id,
command_context=command_context,
)
- file_study = self._get_file_study(study)
execute_or_add_commands(study, file_study, [command], self.storage_service)
def duplicate_cluster(self, study: Study, area_id: str, source_id: str, new_cluster_name: str) -> STStorageOutput:
@@ -455,6 +454,7 @@ def duplicate_cluster(self, study: Study, area_id: str, source_id: str, new_clus
# Matrix edition
lower_source_id = source_id.lower()
+ # noinspection SpellCheckingInspection
ts_names = ["pmax_injection", "pmax_withdrawal", "lower_rule_curve", "upper_rule_curve", "inflows"]
source_paths = [
STORAGE_SERIES_PATH.format(area_id=area_id, storage_id=lower_source_id, ts_name=ts_name)
@@ -533,8 +533,7 @@ def update_matrix(
ts_name: Name of the time series to update.
ts: Matrix of the time series to update.
"""
- matrix_object = ts.dict()
- self._save_matrix_obj(study, area_id, storage_id, ts_name, matrix_object)
+ self._save_matrix_obj(study, area_id, storage_id, ts_name, ts.data)
def _save_matrix_obj(
self,
@@ -542,13 +541,13 @@ def _save_matrix_obj(
area_id: str,
storage_id: str,
ts_name: STStorageTimeSeries,
- matrix_obj: t.Dict[str, t.Any],
+ matrix_data: t.List[t.List[float]],
) -> None:
- path = STORAGE_SERIES_PATH.format(area_id=area_id, storage_id=storage_id, ts_name=ts_name)
- matrix = matrix_obj["data"]
+ file_study = self._get_file_study(study)
command_context = self.storage_service.variant_study_service.command_factory.command_context
- command = ReplaceMatrix(target=path, matrix=matrix, command_context=command_context)
- execute_or_add_commands(study, self._get_file_study(study), [command], self.storage_service)
+ path = STORAGE_SERIES_PATH.format(area_id=area_id, storage_id=storage_id, ts_name=ts_name)
+ command = ReplaceMatrix(target=path, matrix=matrix_data, command_context=command_context)
+ execute_or_add_commands(study, file_study, [command], self.storage_service)
def validate_matrices(
self,
@@ -593,3 +592,31 @@ def validate_matrices(
# Validation successful
return True
+
+
+def _get_existing_storage_ids(file_study: FileStudy, area_id: str) -> t.Set[str]:
+ try:
+ area = file_study.config.areas[area_id]
+ except KeyError:
+ raise AreaNotFound(area_id) from None
+ else:
+ return {s.id for s in area.st_storages}
+
+
+def _check_deletion_feasibility(file_study: FileStudy, area_id: str, storage_ids: t.Sequence[str]) -> None:
+ existing_ids = _get_existing_storage_ids(file_study, area_id)
+ for storage_id in storage_ids:
+ if storage_id not in existing_ids:
+ raise STStorageNotFoundError(file_study.config.study_id, area_id, storage_id)
+
+
+def _check_update_feasibility(file_study: FileStudy, area_id: str, storage_id: str) -> None:
+ existing_ids = _get_existing_storage_ids(file_study, area_id)
+ if storage_id not in existing_ids:
+ raise STStorageNotFoundError(file_study.config.study_id, area_id, storage_id)
+
+
+def _check_creation_feasibility(file_study: FileStudy, area_id: str, storage_id: str) -> None:
+ existing_ids = _get_existing_storage_ids(file_study, area_id)
+ if storage_id in existing_ids:
+ raise DuplicateSTStorageId(file_study.config.study_id, area_id, storage_id)
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 41a9028209..3a8fb03cd3 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -12,6 +12,7 @@ v2.16.8 (2024-04-19)
* **st-storages (ui):** correction of incorrect wording between "withdrawal" and "injection" [`#1977`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1977)
* **st-storages (ui):** change matrix titles [`#1994`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1994)
+* **st-storages:** use command when updating matrices [`#1971`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1971)
v2.16.7 (2024-03-05)
--------------------
diff --git a/tests/integration/study_data_blueprint/test_st_storage.py b/tests/integration/study_data_blueprint/test_st_storage.py
index 5f2421d911..161e6417b8 100644
--- a/tests/integration/study_data_blueprint/test_st_storage.py
+++ b/tests/integration/study_data_blueprint/test_st_storage.py
@@ -29,11 +29,9 @@ class TestSTStorage:
which contains the following areas: ["de", "es", "fr", "it"].
"""
+ @pytest.mark.parametrize("study_type", ["raw", "variant"])
def test_lifecycle__nominal(
- self,
- client: TestClient,
- user_access_token: str,
- study_id: str,
+ self, client: TestClient, user_access_token: str, study_id: str, study_type: str
) -> None:
"""
The purpose of this integration test is to test the endpoints
@@ -59,10 +57,15 @@ def test_lifecycle__nominal(
We will test the deletion of short-term storages.
"""
+ # =============================
+ # SET UP
+ # =============================
+ user_headers = {"Authorization": f"Bearer {user_access_token}"}
+
# Upgrade study to version 860
res = client.put(
f"/v1/studies/{study_id}/upgrade",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
params={"target_version": 860},
)
res.raise_for_status()
@@ -70,6 +73,25 @@ def test_lifecycle__nominal(
task = wait_task_completion(client, user_access_token, task_id)
assert task.status == TaskStatus.COMPLETED, task
+ # Copies the study, to convert it into a managed one.
+ res = client.post(
+ f"/v1/studies/{study_id}/copy",
+ headers={"Authorization": f"Bearer {user_access_token}"},
+ params={"dest": "default", "with_outputs": False, "use_task": False}, # type: ignore
+ )
+ assert res.status_code == 201, res.json()
+ study_id = res.json()
+
+ if study_type == "variant":
+ # Create Variant
+ res = client.post(
+ f"/v1/studies/{study_id}/variants",
+ headers=user_headers,
+ params={"name": "Variant 1"},
+ )
+ assert res.status_code in {200, 201}, res.json()
+ study_id = res.json()
+
# =============================
# SHORT-TERM STORAGE CREATION
# =============================
@@ -85,7 +107,7 @@ def test_lifecycle__nominal(
for attempt in attempts:
res = client.post(
f"/v1/studies/{study_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json=attempt,
)
assert res.status_code == 422, res.json()
@@ -102,7 +124,7 @@ def test_lifecycle__nominal(
}
res = client.post(
f"/v1/studies/{study_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json=siemens_properties,
)
assert res.status_code == 200, res.json()
@@ -114,7 +136,7 @@ def test_lifecycle__nominal(
# reading the properties of a short-term storage
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
)
assert res.status_code == 200, res.json()
assert res.json() == siemens_config
@@ -128,7 +150,7 @@ def test_lifecycle__nominal(
array_list = array.tolist()
res = client.put(
f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}/series/inflows",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json={
"index": list(range(array.shape[0])),
"columns": list(range(array.shape[1])),
@@ -141,7 +163,7 @@ def test_lifecycle__nominal(
# reading the matrix of a short-term storage
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}/series/inflows",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
)
assert res.status_code == 200, res.json()
matrix = res.json()
@@ -151,7 +173,7 @@ def test_lifecycle__nominal(
# validating the matrices of a short-term storage
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}/validate",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
)
assert res.status_code == 200, res.json()
assert res.json() is True
@@ -163,7 +185,7 @@ def test_lifecycle__nominal(
# Reading the list of short-term storages
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
)
assert res.status_code == 200, res.json()
assert res.json() == [siemens_config]
@@ -171,7 +193,7 @@ def test_lifecycle__nominal(
# updating properties
res = client.patch(
f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json={
"name": "New Siemens Battery",
"reservoirCapacity": 2500,
@@ -187,7 +209,7 @@ def test_lifecycle__nominal(
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
)
assert res.status_code == 200, res.json()
assert res.json() == siemens_config
@@ -199,7 +221,7 @@ def test_lifecycle__nominal(
# updating properties
res = client.patch(
f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json={
"initialLevel": 0.59,
"reservoirCapacity": 0,
@@ -219,7 +241,7 @@ def test_lifecycle__nominal(
bad_properties = {"efficiency": 2.0}
res = client.patch(
f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json=bad_properties,
)
assert res.status_code == 422, res.json()
@@ -228,7 +250,7 @@ def test_lifecycle__nominal(
# The short-term storage properties should not have been updated.
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
)
assert res.status_code == 200, res.json()
assert res.json() == siemens_config
@@ -267,7 +289,7 @@ def test_lifecycle__nominal(
res = client.request(
"DELETE",
f"/v1/studies/{study_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json=[siemens_battery_id],
)
assert res.status_code == 204, res.json()
@@ -277,7 +299,7 @@ def test_lifecycle__nominal(
res = client.request(
"DELETE",
f"/v1/studies/{study_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json=[],
)
assert res.status_code == 204, res.json()
@@ -297,7 +319,7 @@ def test_lifecycle__nominal(
}
res = client.post(
f"/v1/studies/{study_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json=siemens_properties,
)
assert res.status_code == 200, res.json()
@@ -316,7 +338,7 @@ def test_lifecycle__nominal(
}
res = client.post(
f"/v1/studies/{study_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json=grand_maison_properties,
)
assert res.status_code == 200, res.json()
@@ -326,7 +348,7 @@ def test_lifecycle__nominal(
# Reading the list of short-term storages
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
)
assert res.status_code == 200, res.json()
siemens_config = {**DEFAULT_PROPERTIES, **siemens_properties, "id": siemens_battery_id}
@@ -337,7 +359,7 @@ def test_lifecycle__nominal(
res = client.request(
"DELETE",
f"/v1/studies/{study_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json=[grand_maison_id, duplicated_config["id"]],
)
assert res.status_code == 204, res.json()
@@ -346,7 +368,7 @@ def test_lifecycle__nominal(
# Only one st-storage should remain.
res = client.get(
f"/v1/studies/{study_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
)
assert res.status_code == 200, res.json()
assert len(res.json()) == 1
@@ -360,25 +382,21 @@ def test_lifecycle__nominal(
res = client.request(
"DELETE",
f"/v1/studies/{study_id}/areas/{bad_area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json=[siemens_battery_id],
)
- assert res.status_code == 500, res.json()
+ assert res.status_code == 404
obj = res.json()
- description = obj["description"]
- assert bad_area_id in description
- assert re.search(
- r"CommandName.REMOVE_ST_STORAGE",
- description,
- flags=re.IGNORECASE,
- )
+
+ assert obj["description"] == f"Area is not found: '{bad_area_id}'"
+ assert obj["exception"] == "AreaNotFound"
# Check delete with the wrong value of `study_id`
bad_study_id = "bad_study"
res = client.request(
"DELETE",
f"/v1/studies/{bad_study_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json=[siemens_battery_id],
)
obj = res.json()
@@ -389,7 +407,7 @@ def test_lifecycle__nominal(
# Check get with wrong `area_id`
res = client.get(
f"/v1/studies/{study_id}/areas/{bad_area_id}/storages/{siemens_battery_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
)
obj = res.json()
description = obj["description"]
@@ -399,7 +417,7 @@ def test_lifecycle__nominal(
# Check get with wrong `study_id`
res = client.get(
f"/v1/studies/{bad_study_id}/areas/{area_id}/storages/{siemens_battery_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
)
obj = res.json()
description = obj["description"]
@@ -409,7 +427,7 @@ def test_lifecycle__nominal(
# Check POST with wrong `study_id`
res = client.post(
f"/v1/studies/{bad_study_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json={"name": siemens_battery, "group": "Battery"},
)
obj = res.json()
@@ -420,20 +438,18 @@ def test_lifecycle__nominal(
# Check POST with wrong `area_id`
res = client.post(
f"/v1/studies/{study_id}/areas/{bad_area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json={"name": siemens_battery, "group": "Battery"},
)
- assert res.status_code == 500, res.json()
+ assert res.status_code == 404
obj = res.json()
- description = obj["description"]
- assert bad_area_id in description
- assert re.search(r"Area ", description, flags=re.IGNORECASE)
- assert re.search(r"does not exist ", description, flags=re.IGNORECASE)
+ assert obj["description"] == f"Area is not found: '{bad_area_id}'"
+ assert obj["exception"] == "AreaNotFound"
# Check POST with wrong `group`
res = client.post(
f"/v1/studies/{study_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json={"name": siemens_battery, "group": "GroupFoo"},
)
assert res.status_code == 422, res.json()
@@ -444,33 +460,30 @@ def test_lifecycle__nominal(
# Check PATCH with the wrong `area_id`
res = client.patch(
f"/v1/studies/{study_id}/areas/{bad_area_id}/storages/{siemens_battery_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json={"efficiency": 1.0},
)
- assert res.status_code == 404, res.json()
+ assert res.status_code == 404
obj = res.json()
- description = obj["description"]
- assert bad_area_id in description
- assert re.search(r"not a child of ", description, flags=re.IGNORECASE)
+ assert obj["description"] == f"Area is not found: '{bad_area_id}'"
+ assert obj["exception"] == "AreaNotFound"
# Check PATCH with the wrong `storage_id`
bad_storage_id = "bad_storage"
res = client.patch(
f"/v1/studies/{study_id}/areas/{area_id}/storages/{bad_storage_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json={"efficiency": 1.0},
)
- assert res.status_code == 404, res.json()
+ assert res.status_code == 404
obj = res.json()
- description = obj["description"]
- assert bad_storage_id in description
- assert re.search(r"fields of storage", description, flags=re.IGNORECASE)
- assert re.search(r"not found", description, flags=re.IGNORECASE)
+ assert obj["description"] == f"Short-term storage '{bad_storage_id}' not found in area '{area_id}'"
+ assert obj["exception"] == "STStorageNotFoundError"
# Check PATCH with the wrong `study_id`
res = client.patch(
f"/v1/studies/{bad_study_id}/areas/{area_id}/storages/{siemens_battery_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json={"efficiency": 1.0},
)
assert res.status_code == 404, res.json()
@@ -478,7 +491,7 @@ def test_lifecycle__nominal(
description = obj["description"]
assert bad_study_id in description
- # Cannot duplicate a fake st-storage
+ # Cannot duplicate an unknown st-storage
unknown_id = "unknown"
res = client.post(
f"/v1/studies/{study_id}/areas/{area_id}/storages/{unknown_id}",
@@ -502,11 +515,8 @@ def test_lifecycle__nominal(
assert siemens_battery.lower() in description
assert obj["exception"] == "ClusterAlreadyExists"
- def test__default_values(
- self,
- client: TestClient,
- user_access_token: str,
- ) -> None:
+ @pytest.mark.parametrize("study_type", ["raw", "variant"])
+ def test__default_values(self, client: TestClient, user_access_token: str, study_type: str) -> None:
"""
The purpose of this integration test is to test the default values of
the properties of a short-term storage.
@@ -516,18 +526,29 @@ def test__default_values(
Then the short-term storage is created with initialLevel = 0.0, and initialLevelOptim = False.
"""
# Create a new study in version 860 (or higher)
+ user_headers = {"Authorization": f"Bearer {user_access_token}"}
res = client.post(
"/v1/studies",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
params={"name": "MyStudy", "version": 860},
)
assert res.status_code in {200, 201}, res.json()
study_id = res.json()
+ if study_type == "variant":
+ # Create Variant
+ res = client.post(
+ f"/v1/studies/{study_id}/variants",
+ headers=user_headers,
+ params={"name": "Variant 1"},
+ )
+ assert res.status_code in {200, 201}, res.json()
+ study_id = res.json()
+
# Create a new area named "FR"
res = client.post(
f"/v1/studies/{study_id}/areas",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json={"name": "FR", "type": "AREA"},
)
assert res.status_code in {200, 201}, res.json()
@@ -537,7 +558,7 @@ def test__default_values(
tesla_battery = "Tesla Battery"
res = client.post(
f"/v1/studies/{study_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json={"name": tesla_battery, "group": "Battery"},
)
assert res.status_code == 200, res.json()
@@ -549,7 +570,7 @@ def test__default_values(
# are properly set in the configuration file.
res = client.get(
f"/v1/studies/{study_id}/raw",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
params={"path": f"input/st-storage/clusters/{area_id}/list/{tesla_battery_id}"},
)
assert res.status_code == 200, res.json()
@@ -564,7 +585,7 @@ def test__default_values(
# Create a variant of the study
res = client.post(
f"/v1/studies/{study_id}/variants",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
params={"name": "MyVariant"},
)
assert res.status_code in {200, 201}, res.json()
@@ -574,7 +595,7 @@ def test__default_values(
siemens_battery = "Siemens Battery"
res = client.post(
f"/v1/studies/{variant_id}/areas/{area_id}/storages",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json={"name": siemens_battery, "group": "Battery"},
)
assert res.status_code == 200, res.json()
@@ -582,7 +603,7 @@ def test__default_values(
# Check the variant commands
res = client.get(
f"/v1/studies/{variant_id}/commands",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
)
assert res.status_code == 200, res.json()
commands = res.json()
@@ -608,7 +629,7 @@ def test__default_values(
siemens_battery_id = transform_name_to_id(siemens_battery)
res = client.patch(
f"/v1/studies/{variant_id}/areas/{area_id}/storages/{siemens_battery_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json={"initialLevel": 0.5},
)
assert res.status_code == 200, res.json()
@@ -616,7 +637,7 @@ def test__default_values(
# Check the variant commands
res = client.get(
f"/v1/studies/{variant_id}/commands",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
)
assert res.status_code == 200, res.json()
commands = res.json()
@@ -636,7 +657,7 @@ def test__default_values(
# Update the initialLevel property of the "Siemens Battery" short-term storage back to 0
res = client.patch(
f"/v1/studies/{variant_id}/areas/{area_id}/storages/{siemens_battery_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
json={"initialLevel": 0.0, "injectionNominalCapacity": 1600},
)
assert res.status_code == 200, res.json()
@@ -644,7 +665,7 @@ def test__default_values(
# Check the variant commands
res = client.get(
f"/v1/studies/{variant_id}/commands",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
)
assert res.status_code == 200, res.json()
commands = res.json()
@@ -671,7 +692,7 @@ def test__default_values(
# are properly set in the configuration file.
res = client.get(
f"/v1/studies/{variant_id}/raw",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ headers=user_headers,
params={"path": f"input/st-storage/clusters/{area_id}/list/{siemens_battery_id}"},
)
assert res.status_code == 200, res.json()
diff --git a/tests/study/business/areas/test_st_storage_management.py b/tests/study/business/areas/test_st_storage_management.py
index 646dc26c78..5c3e7e660c 100644
--- a/tests/study/business/areas/test_st_storage_management.py
+++ b/tests/study/business/areas/test_st_storage_management.py
@@ -11,16 +11,19 @@
from sqlalchemy.orm.session import Session # type: ignore
from antarest.core.exceptions import (
+ AreaNotFound,
STStorageConfigNotFoundError,
STStorageFieldsNotFoundError,
STStorageMatrixNotFoundError,
+ STStorageNotFoundError,
)
from antarest.core.model import PublicMode
from antarest.login.model import Group, User
from antarest.study.business.areas.st_storage_management import STStorageInput, STStorageManager
from antarest.study.model import RawStudy, Study, StudyContentStatus
from antarest.study.storage.rawstudy.ini_reader import IniReader
-from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import STStorageGroup
+from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, FileStudyTreeConfig
+from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import STStorageConfig, STStorageGroup
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode
from antarest.study.storage.rawstudy.model.filesystem.root.filestudytree import FileStudyTree
@@ -287,11 +290,29 @@ def test_update_storage__nominal_case(
get_node=Mock(return_value=ini_file_node),
)
+ area = Mock(spec=Area)
+ mock_config = Mock(spec=FileStudyTreeConfig, study_id=study.id)
+ file_study.config = mock_config
+
# Given the following arguments
manager = STStorageManager(study_storage_service)
-
- # Run the method being tested
edit_form = STStorageInput(initial_level=0, initial_level_optim=False)
+
+ # Test behavior for area not in study
+ mock_config.areas = {"fake_area": area}
+ with pytest.raises(AreaNotFound) as ctx:
+ manager.update_storage(study, area_id="West", storage_id="storage1", form=edit_form)
+ assert ctx.value.detail == "Area is not found: 'West'"
+
+ # Test behavior for st_storage not in study
+ mock_config.areas = {"West": area}
+ area.st_storages = [STStorageConfig(name="fake_name", group="battery")]
+ with pytest.raises(STStorageNotFoundError) as ctx:
+ manager.update_storage(study, area_id="West", storage_id="storage1", form=edit_form)
+ assert ctx.value.detail == "Short-term storage 'storage1' not found in area 'West'"
+
+ # Test behavior for nominal case
+ area.st_storages = [STStorageConfig(name="storage1", group="battery")]
manager.update_storage(study, area_id="West", storage_id="storage1", form=edit_form)
# Assert that the storage fields have been updated
From ec006b867c42b6e1f183fcfa4173f5aaa361e158 Mon Sep 17 00:00:00 2001
From: MartinBelthle <102529366+martinbelthle@users.noreply.github.com>
Date: Sat, 9 Mar 2024 20:11:37 +0100
Subject: [PATCH 06/16] fix(variants): avoid Recursive error when creating big
variant tree (#1967)
(cherry picked from commit d90c5ccf0f3706388508b7960a040053a715ec21)
---
.../storage/variantstudy/business/utils.py | 9 ++--
.../study/storage/variantstudy/model/model.py | 53 +++++++++++++++----
docs/CHANGELOG.md | 2 +
.../studies_blueprint/test_synthesis.py | 2 +-
.../variant_blueprint/test_variant_manager.py | 41 ++++++++++++++
5 files changed, 94 insertions(+), 13 deletions(-)
diff --git a/antarest/study/storage/variantstudy/business/utils.py b/antarest/study/storage/variantstudy/business/utils.py
index 6f04601ec5..933c72bed7 100644
--- a/antarest/study/storage/variantstudy/business/utils.py
+++ b/antarest/study/storage/variantstudy/business/utils.py
@@ -52,10 +52,13 @@ def get_or_create_section(json_ini: JSON, section: str) -> JSON:
def remove_none_args(command_dto: CommandDTO) -> CommandDTO:
- if isinstance(command_dto.args, list):
- command_dto.args = [{k: v for k, v in args.items() if v is not None} for args in command_dto.args]
+ args = command_dto.args
+ if isinstance(args, list):
+ command_dto.args = [{k: v for k, v in args.items() if v is not None} for args in args]
+ elif isinstance(args, dict):
+ command_dto.args = {k: v for k, v in args.items() if v is not None}
else:
- command_dto.args = {k: v for k, v in command_dto.args.items() if v is not None}
+ raise TypeError(f"Invalid type for args: {type(args)}")
return command_dto
diff --git a/antarest/study/storage/variantstudy/model/model.py b/antarest/study/storage/variantstudy/model/model.py
index 1e51032ce4..cd478742b4 100644
--- a/antarest/study/storage/variantstudy/model/model.py
+++ b/antarest/study/storage/variantstudy/model/model.py
@@ -1,4 +1,4 @@
-from typing import List, Optional, Tuple, Union
+import typing as t
from pydantic import BaseModel
@@ -7,28 +7,63 @@
class GenerationResultInfoDTO(BaseModel):
+ """
+ Result information of a snapshot generation process.
+
+ Attributes:
+ success: A boolean indicating whether the generation process was successful.
+ details: A list of tuples containing detailed information about the generation process.
+ """
+
success: bool
- details: List[Tuple[str, bool, str]]
+ details: t.MutableSequence[t.Tuple[str, bool, str]]
class CommandDTO(BaseModel):
- id: Optional[str]
+ """
+ This class represents a command.
+
+ Attributes:
+ id: The unique identifier of the command.
+ action: The action to be performed by the command.
+ args: The arguments for the command action.
+ version: The version of the command.
+ """
+
+ id: t.Optional[str]
action: str
- # if args is a list, this mean the command will be mapped to the list of args
- args: Union[List[JSON], JSON]
+ args: t.Union[t.MutableSequence[JSON], JSON]
version: int = 1
class CommandResultDTO(BaseModel):
+ """
+ This class represents the result of a command.
+
+ Attributes:
+ study_id: The unique identifier of the study.
+ id: The unique identifier of the command.
+ success: A boolean indicating whether the command was successful.
+ message: A message detailing the result of the command.
+ """
+
study_id: str
id: str
success: bool
message: str
-class VariantTreeDTO(BaseModel):
- node: StudyMetadataDTO
- children: List["VariantTreeDTO"]
+class VariantTreeDTO:
+ """
+ This class represents a variant tree structure.
+ Attributes:
+ node: The metadata of the study (ID, name, version, etc.).
+ children: A list of variant children.
+ """
-VariantTreeDTO.update_forward_refs()
+ def __init__(self, node: StudyMetadataDTO, children: t.MutableSequence["VariantTreeDTO"]) -> None:
+ # We are intentionally not using Pydantic’s `BaseModel` here to prevent potential
+ # `RecursionError` exceptions that can occur with Pydantic versions before v2.
+ self.node = node
+ self.children = children or []
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 3a8fb03cd3..1308ab7431 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -13,6 +13,8 @@ v2.16.8 (2024-04-19)
* **st-storages (ui):** correction of incorrect wording between "withdrawal" and "injection" [`#1977`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1977)
* **st-storages (ui):** change matrix titles [`#1994`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1994)
* **st-storages:** use command when updating matrices [`#1971`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1971)
+* **variants:** avoid recursive error when creating big variant tree [`#1967`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1967)
+
v2.16.7 (2024-03-05)
--------------------
diff --git a/tests/integration/studies_blueprint/test_synthesis.py b/tests/integration/studies_blueprint/test_synthesis.py
index 70f5f0c907..aa6141e782 100644
--- a/tests/integration/studies_blueprint/test_synthesis.py
+++ b/tests/integration/studies_blueprint/test_synthesis.py
@@ -58,7 +58,7 @@ def test_raw_study(
)
assert res.status_code == 200, res.json()
duration = time.time() - start
- assert 0 <= duration <= 0.1, f"Duration is {duration} seconds"
+ assert 0 <= duration <= 0.3, f"Duration is {duration} seconds"
def test_variant_study(
self,
diff --git a/tests/integration/variant_blueprint/test_variant_manager.py b/tests/integration/variant_blueprint/test_variant_manager.py
index 5af256dbbe..9d5be37e2b 100644
--- a/tests/integration/variant_blueprint/test_variant_manager.py
+++ b/tests/integration/variant_blueprint/test_variant_manager.py
@@ -186,3 +186,44 @@ def test_variant_manager(client: TestClient, admin_access_token: str, study_id:
res = client.get(f"/v1/studies/{variant_id}", headers=admin_headers)
assert res.status_code == 404
+
+
+def test_comments(client: TestClient, admin_access_token: str, variant_id: str) -> None:
+ admin_headers = {"Authorization": f"Bearer {admin_access_token}"}
+
+ # Put comments
+ comment = "updated comment"
+ res = client.put(f"/v1/studies/{variant_id}/comments", json={"comments": comment}, headers=admin_headers)
+ assert res.status_code == 204
+
+ # Asserts comments are updated
+ res = client.get(f"/v1/studies/{variant_id}/comments", headers=admin_headers)
+ assert res.json() == comment
+
+ # Generates the study
+ res = client.put(f"/v1/studies/{variant_id}/generate?denormalize=false&from_scratch=true", headers=admin_headers)
+ task_id = res.json()
+ # Wait for task completion
+ res = client.get(f"/v1/tasks/{task_id}", headers=admin_headers, params={"wait_for_completion": True})
+ assert res.status_code == 200
+ task_result = TaskDTO.parse_obj(res.json())
+ assert task_result.status == TaskStatus.COMPLETED
+ assert task_result.result is not None
+ assert task_result.result.success
+
+ # Asserts comments did not disappear
+ res = client.get(f"/v1/studies/{variant_id}/comments", headers=admin_headers)
+ assert res.json() == comment
+
+
+def test_recursive_variant_tree(client: TestClient, admin_access_token: str):
+ admin_headers = {"Authorization": f"Bearer {admin_access_token}"}
+ base_study_res = client.post("/v1/studies?name=foo", headers=admin_headers)
+ base_study_id = base_study_res.json()
+ parent_id = base_study_res.json()
+ for k in range(150):
+ res = client.post(f"/v1/studies/{base_study_id}/variants?name=variant_{k}", headers=admin_headers)
+ base_study_id = res.json()
+ # Asserts that we do not trigger a Recursive Exception
+ res = client.get(f"/v1/studies/{parent_id}/variants", headers=admin_headers)
+ assert res.status_code == 200
From 88d4930924451e8514f909cf563e06303bdc5383 Mon Sep 17 00:00:00 2001
From: belthlemar
Date: Thu, 29 Feb 2024 15:12:48 +0100
Subject: [PATCH 07/16] fix(outputs): build outputs config even when using
cache (#1958)
(cherry picked from commit 07cf7cac22554b3c6cd63c5eb66766ccb5fbd2ea)
---
.../rawstudy/model/filesystem/config/files.py | 4 +-
.../rawstudy/model/filesystem/factory.py | 5 +-
docs/CHANGELOG.md | 2 +-
.../variant_blueprint/test_variant_manager.py | 80 +++++++++++++++++--
.../filesystem/config/test_config_files.py | 4 +-
tests/variantstudy/conftest.py | 15 ++++
tests/variantstudy/test_command_factory.py | 7 ++
7 files changed, 104 insertions(+), 13 deletions(-)
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/files.py b/antarest/study/storage/rawstudy/model/filesystem/config/files.py
index 3727f320ec..3248b6560a 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/files.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/files.py
@@ -74,7 +74,7 @@ def build(study_path: Path, study_id: str, output_path: t.Optional[Path] = None)
version=_parse_version(study_path),
areas=_parse_areas(study_path),
sets=_parse_sets(study_path),
- outputs=_parse_outputs(outputs_dir),
+ outputs=parse_outputs(outputs_dir),
bindings=_parse_bindings(study_path),
store_new_set=sns,
archive_input_series=asi,
@@ -232,7 +232,7 @@ def _parse_areas(root: Path) -> t.Dict[str, Area]:
return {transform_name_to_id(a): parse_area(root, a) for a in areas}
-def _parse_outputs(output_path: Path) -> t.Dict[str, Simulation]:
+def parse_outputs(output_path: Path) -> t.Dict[str, Simulation]:
if not output_path.is_dir():
return {}
sims = {}
diff --git a/antarest/study/storage/rawstudy/model/filesystem/factory.py b/antarest/study/storage/rawstudy/model/filesystem/factory.py
index 1899ec1bb4..040e747629 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/factory.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/factory.py
@@ -10,7 +10,7 @@
from antarest.core.interfaces.cache import CacheConstants, ICache
from antarest.matrixstore.service import ISimpleMatrixService
from antarest.matrixstore.uri_resolver_service import UriResolverService
-from antarest.study.storage.rawstudy.model.filesystem.config.files import build
+from antarest.study.storage.rawstudy.model.filesystem.config.files import build, parse_outputs
from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig, FileStudyTreeConfigDTO
from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer
from antarest.study.storage.rawstudy.model.filesystem.root.filestudytree import FileStudyTree
@@ -93,6 +93,9 @@ def _create_from_fs_unsafe(
if from_cache is not None:
logger.info(f"Study {study_id} read from cache")
config = FileStudyTreeConfigDTO.parse_obj(from_cache).to_build_config()
+ if output_path:
+ config.output_path = output_path
+ config.outputs = parse_outputs(output_path)
return FileStudy(config, FileStudyTree(self.context, config))
start_time = time.time()
config = build(path, study_id, output_path)
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 1308ab7431..0225f35c72 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -14,7 +14,7 @@ v2.16.8 (2024-04-19)
* **st-storages (ui):** change matrix titles [`#1994`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1994)
* **st-storages:** use command when updating matrices [`#1971`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1971)
* **variants:** avoid recursive error when creating big variant tree [`#1967`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1967)
-
+* **outputs:** build outputs config even when using cache [`#1958`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1958)
v2.16.7 (2024-03-05)
--------------------
diff --git a/tests/integration/variant_blueprint/test_variant_manager.py b/tests/integration/variant_blueprint/test_variant_manager.py
index 9d5be37e2b..1ef02ce318 100644
--- a/tests/integration/variant_blueprint/test_variant_manager.py
+++ b/tests/integration/variant_blueprint/test_variant_manager.py
@@ -1,8 +1,11 @@
+import io
import logging
+import time
from starlette.testclient import TestClient
from antarest.core.tasks.model import TaskDTO, TaskStatus
+from tests.integration.assets import ASSETS_DIR
def test_variant_manager(client: TestClient, admin_access_token: str, study_id: str, caplog) -> None:
@@ -216,14 +219,77 @@ def test_comments(client: TestClient, admin_access_token: str, variant_id: str)
assert res.json() == comment
-def test_recursive_variant_tree(client: TestClient, admin_access_token: str):
+def test_recursive_variant_tree(client: TestClient, admin_access_token: str, base_study_id: str) -> None:
admin_headers = {"Authorization": f"Bearer {admin_access_token}"}
- base_study_res = client.post("/v1/studies?name=foo", headers=admin_headers)
- base_study_id = base_study_res.json()
- parent_id = base_study_res.json()
- for k in range(150):
- res = client.post(f"/v1/studies/{base_study_id}/variants?name=variant_{k}", headers=admin_headers)
+ parent_id = base_study_id
+ for k in range(200):
+ res = client.post(
+ f"/v1/studies/{base_study_id}/variants",
+ headers=admin_headers,
+ params={"name": f"variant_{k}"},
+ )
base_study_id = res.json()
+
# Asserts that we do not trigger a Recursive Exception
res = client.get(f"/v1/studies/{parent_id}/variants", headers=admin_headers)
- assert res.status_code == 200
+ assert res.status_code == 200, res.json()
+
+
+def test_outputs(client: TestClient, admin_access_token: str, variant_id: str, tmp_path: str) -> None:
+ # =======================
+ # SET UP
+ # =======================
+
+ admin_headers = {"Authorization": f"Bearer {admin_access_token}"}
+
+ # Only done to generate the variant folder
+ res = client.post(f"/v1/launcher/run/{variant_id}", headers=admin_headers)
+ res.raise_for_status()
+ job_id = res.json()["job_id"]
+
+ status = client.get(f"/v1/launcher/jobs/{job_id}", headers=admin_headers).json()["status"]
+ while status != "failed":
+ time.sleep(0.2)
+ status = client.get(f"/v1/launcher/jobs/{job_id}", headers=admin_headers).json()["status"]
+
+ # Import an output to the study folder
+ output_path_zip = ASSETS_DIR / "output_adq.zip"
+ res = client.post(
+ f"/v1/studies/{variant_id}/output",
+ headers=admin_headers,
+ files={"output": io.BytesIO(output_path_zip.read_bytes())},
+ )
+ res.raise_for_status()
+
+ # =======================
+ # ASSERTS GENERATING THE VARIANT DOES NOT `HIDE` OUTPUTS FROM THE ENDPOINT
+ # =======================
+
+ # Get output
+ res = client.get(f"/v1/studies/{variant_id}/outputs", headers=admin_headers)
+ assert res.status_code == 200, res.json()
+ outputs = res.json()
+ assert len(outputs) == 1
+
+ # Generates the study
+ res = client.put(
+ f"/v1/studies/{variant_id}/generate",
+ headers=admin_headers,
+ params={"denormalize": False, "from_scratch": True},
+ )
+ res.raise_for_status()
+ task_id = res.json()
+
+ # Wait for task completion
+ res = client.get(f"/v1/tasks/{task_id}", headers=admin_headers, params={"wait_for_completion": True})
+ res.raise_for_status()
+ task_result = TaskDTO.parse_obj(res.json())
+ assert task_result.status == TaskStatus.COMPLETED
+ assert task_result.result is not None
+ assert task_result.result.success
+
+ # Get outputs again
+ res = client.get(f"/v1/studies/{variant_id}/outputs", headers=admin_headers)
+ assert res.status_code == 200, res.json()
+ outputs = res.json()
+ assert len(outputs) == 1
diff --git a/tests/storage/repository/filesystem/config/test_config_files.py b/tests/storage/repository/filesystem/config/test_config_files.py
index 4f88115291..a8d8d2fecc 100644
--- a/tests/storage/repository/filesystem/config/test_config_files.py
+++ b/tests/storage/repository/filesystem/config/test_config_files.py
@@ -11,12 +11,12 @@
)
from antarest.study.storage.rawstudy.model.filesystem.config.files import (
_parse_links,
- _parse_outputs,
_parse_renewables,
_parse_sets,
_parse_st_storage,
_parse_thermal,
build,
+ parse_outputs,
)
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
Area,
@@ -222,7 +222,7 @@ def test_parse_outputs__nominal(tmp_path: Path, assets_name: str, expected: Dict
with ZipFile(pkg_dir) as zf:
zf.extractall(tmp_path)
output_path = tmp_path.joinpath("output")
- actual = _parse_outputs(output_path)
+ actual = parse_outputs(output_path)
assert actual == expected
diff --git a/tests/variantstudy/conftest.py b/tests/variantstudy/conftest.py
index 011a6bb68d..f8dd8f2ebd 100644
--- a/tests/variantstudy/conftest.py
+++ b/tests/variantstudy/conftest.py
@@ -70,6 +70,21 @@ def delete(matrix_id: str) -> None:
"""
del matrix_map[matrix_id]
+ def get_matrix_id(matrix: t.Union[t.List[t.List[float]], str]) -> str:
+ """
+ Get the matrix ID from a matrix or a matrix link.
+ """
+ if isinstance(matrix, str):
+ # str.removeprefix() is not available in Python 3.8
+ prefix = "matrix://"
+ if matrix.startswith(prefix):
+ return matrix[len(prefix) :]
+ return matrix
+ elif isinstance(matrix, list):
+ return create(matrix)
+ else:
+ raise TypeError(f"Invalid type for matrix: {type(matrix)}")
+
matrix_service = Mock(spec=MatrixService)
matrix_service.create.side_effect = create
matrix_service.get.side_effect = get
diff --git a/tests/variantstudy/test_command_factory.py b/tests/variantstudy/test_command_factory.py
index aac2be6c59..10e62dc036 100644
--- a/tests/variantstudy/test_command_factory.py
+++ b/tests/variantstudy/test_command_factory.py
@@ -402,6 +402,13 @@ def setup_class(self):
)
@pytest.mark.unit_test
def test_command_factory(self, command_dto: CommandDTO):
+ def get_matrix_id(matrix: str) -> str:
+ # str.removeprefix() is not available in Python 3.8
+ prefix = "matrix://"
+ if matrix.startswith(prefix):
+ return matrix[len(prefix) :]
+ return matrix
+
command_factory = CommandFactory(
generator_matrix_constants=Mock(spec=GeneratorMatrixConstants),
matrix_service=Mock(spec=MatrixService),
From d75138c15e9abcfd8659666ab83d8c10b97eb6eb Mon Sep 17 00:00:00 2001
From: MartinBelthle <102529366+martinbelthle@users.noreply.github.com>
Date: Tue, 5 Mar 2024 18:03:01 +0100
Subject: [PATCH 08/16] fix(comments): use a command to update comments on a
variant (#1959)
Co-authored-by: Laurent LAPORTE
(cherry picked from commit f7f082a0e836c81d0791fbd316eeb324a1d99088)
---
antarest/study/service.py | 34 +++++++-------
antarest/study/storage/storage_service.py | 11 +----
.../model/command/update_raw_file.py | 9 ++++
docs/CHANGELOG.md | 2 +
.../variant_blueprint/test_variant_manager.py | 44 ++++++++++++++-----
5 files changed, 61 insertions(+), 39 deletions(-)
diff --git a/antarest/study/service.py b/antarest/study/service.py
index ae86fe62ae..7954b4c5dc 100644
--- a/antarest/study/service.py
+++ b/antarest/study/service.py
@@ -80,7 +80,6 @@
MatrixIndex,
PatchArea,
PatchCluster,
- PatchStudy,
RawStudy,
Study,
StudyAdditionalData,
@@ -110,6 +109,7 @@
upgrade_study,
)
from antarest.study.storage.utils import assert_permission, get_start_date, is_managed, remove_from_cache
+from antarest.study.storage.variantstudy.business.utils import transform_command_to_dto
from antarest.study.storage.variantstudy.model.command.icommand import ICommand
from antarest.study.storage.variantstudy.model.command.replace_matrix import ReplaceMatrix
from antarest.study.storage.variantstudy.model.command.update_comments import UpdateComments
@@ -383,17 +383,7 @@ def get_comments(self, study_id: str, params: RequestParameters) -> t.Union[str,
study = self.get_study(study_id)
assert_permission(params.user, study, StudyPermissionType.READ)
- output: t.Union[str, JSON]
- raw_study_service = self.storage_service.raw_study_service
- variant_study_service = self.storage_service.variant_study_service
- if isinstance(study, RawStudy):
- output = raw_study_service.get(metadata=study, url="/settings/comments")
- elif isinstance(study, VariantStudy):
- patch = raw_study_service.patch_service.get(study)
- patch_study = PatchStudy() if patch.study is None else patch.study
- output = patch_study.comments or variant_study_service.get(metadata=study, url="/settings/comments")
- else:
- raise StudyTypeUnsupported(study.id, study.type)
+ output = self.storage_service.get_storage(study).get(metadata=study, url="/settings/comments")
with contextlib.suppress(AttributeError, UnicodeDecodeError):
output = output.decode("utf-8") # type: ignore
@@ -428,14 +418,20 @@ def edit_comments(
new=bytes(data.comments, "utf-8"),
params=params,
)
- elif isinstance(study, VariantStudy):
- patch = self.storage_service.raw_study_service.patch_service.get(study)
- patch_study = patch.study or PatchStudy()
- patch_study.comments = data.comments
- patch.study = patch_study
- self.storage_service.raw_study_service.patch_service.save(study, patch)
else:
- raise StudyTypeUnsupported(study.id, study.type)
+ variant_study_service = self.storage_service.variant_study_service
+ command = [
+ UpdateRawFile(
+ target="settings/comments",
+ b64Data=base64.b64encode(data.comments.encode("utf-8")).decode("utf-8"),
+ command_context=variant_study_service.command_factory.command_context,
+ )
+ ]
+ variant_study_service.append_commands(
+ study.id,
+ transform_command_to_dto(command, force_aggregate=True),
+ RequestParameters(user=params.user),
+ )
def get_studies_information(
self,
diff --git a/antarest/study/storage/storage_service.py b/antarest/study/storage/storage_service.py
index affe97eae1..599e948948 100644
--- a/antarest/study/storage/storage_service.py
+++ b/antarest/study/storage/storage_service.py
@@ -5,7 +5,6 @@
from typing import Union
-from antarest.core.exceptions import StudyTypeUnsupported
from antarest.study.common.studystorage import IStudyStorageService
from antarest.study.model import RawStudy, Study
from antarest.study.storage.rawstudy.raw_study_service import RawStudyService
@@ -49,13 +48,5 @@ def get_storage(self, study: Study) -> IStudyStorageService[Union[RawStudy, Vari
Returns:
The study storage service associated with the study type.
-
- Raises:
- StudyTypeUnsupported: If the study type is not supported by the available storage services.
"""
- if isinstance(study, RawStudy):
- return self.raw_study_service
- elif isinstance(study, VariantStudy):
- return self.variant_study_service
- else:
- raise StudyTypeUnsupported(study.id, study.type)
+ return self.raw_study_service if isinstance(study, RawStudy) else self.variant_study_service
diff --git a/antarest/study/storage/variantstudy/model/command/update_raw_file.py b/antarest/study/storage/variantstudy/model/command/update_raw_file.py
index c4b6cfb46b..3e7b3b8759 100644
--- a/antarest/study/storage/variantstudy/model/command/update_raw_file.py
+++ b/antarest/study/storage/variantstudy/model/command/update_raw_file.py
@@ -26,6 +26,15 @@ class UpdateRawFile(ICommand):
target: str
b64Data: str
+ def __repr__(self) -> str:
+ cls = self.__class__.__name__
+ target = self.target
+ try:
+ data = base64.decodebytes(self.b64Data.encode("utf-8")).decode("utf-8")
+ return f"{cls}(target={target!r}, data={data!r})"
+ except (ValueError, TypeError):
+ return f"{cls}(target={target!r}, b64Data={self.b64Data!r})"
+
def _apply_config(self, study_data: FileStudyTreeConfig) -> Tuple[CommandOutput, Dict[str, Any]]:
return CommandOutput(status=True, message="ok"), {}
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 0225f35c72..89c3218b06 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -15,6 +15,8 @@ v2.16.8 (2024-04-19)
* **st-storages:** use command when updating matrices [`#1971`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1971)
* **variants:** avoid recursive error when creating big variant tree [`#1967`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1967)
* **outputs:** build outputs config even when using cache [`#1958`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1958)
+* **comments:** use a command to update comments on a variant [`#1959`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1959)
+
v2.16.7 (2024-03-05)
--------------------
diff --git a/tests/integration/variant_blueprint/test_variant_manager.py b/tests/integration/variant_blueprint/test_variant_manager.py
index 1ef02ce318..a0e4a68108 100644
--- a/tests/integration/variant_blueprint/test_variant_manager.py
+++ b/tests/integration/variant_blueprint/test_variant_manager.py
@@ -1,24 +1,48 @@
import io
import logging
import time
+import typing as t
+import pytest
from starlette.testclient import TestClient
from antarest.core.tasks.model import TaskDTO, TaskStatus
from tests.integration.assets import ASSETS_DIR
-def test_variant_manager(client: TestClient, admin_access_token: str, study_id: str, caplog) -> None:
+@pytest.fixture(name="base_study_id")
+def base_study_id_fixture(client: TestClient, admin_access_token: str, caplog: t.Any) -> str:
+ """Create a base study and return its ID."""
+ admin_headers = {"Authorization": f"Bearer {admin_access_token}"}
with caplog.at_level(level=logging.WARNING):
- admin_headers = {"Authorization": f"Bearer {admin_access_token}"}
-
- base_study_res = client.post("/v1/studies?name=foo", headers=admin_headers)
+ res = client.post("/v1/studies?name=Base1", headers=admin_headers)
+ return t.cast(str, res.json())
+
+
+@pytest.fixture(name="variant_id")
+def variant_id_fixture(
+ client: TestClient,
+ admin_access_token: str,
+ base_study_id: str,
+ caplog: t.Any,
+) -> str:
+ """Create a variant and return its ID."""
+ admin_headers = {"Authorization": f"Bearer {admin_access_token}"}
+ with caplog.at_level(level=logging.WARNING):
+ res = client.post(f"/v1/studies/{base_study_id}/variants?name=Variant1", headers=admin_headers)
+ return t.cast(str, res.json())
- base_study_id = base_study_res.json()
- res = client.post(f"/v1/studies/{base_study_id}/variants?name=foo", headers=admin_headers)
- variant_id = res.json()
+def test_variant_manager(
+ client: TestClient,
+ admin_access_token: str,
+ base_study_id: str,
+ variant_id: str,
+ caplog: t.Any,
+) -> None:
+ admin_headers = {"Authorization": f"Bearer {admin_access_token}"}
+ with caplog.at_level(level=logging.WARNING):
client.post(f"/v1/launcher/run/{variant_id}", headers=admin_headers)
res = client.get(f"v1/studies/{variant_id}/synthesis", headers=admin_headers)
@@ -29,9 +53,9 @@ def test_variant_manager(client: TestClient, admin_access_token: str, study_id:
client.post(f"/v1/studies/{variant_id}/variants?name=baz", headers=admin_headers)
res = client.get(f"/v1/studies/{base_study_id}/variants", headers=admin_headers)
children = res.json()
- assert children["node"]["name"] == "foo"
+ assert children["node"]["name"] == "Base1"
assert len(children["children"]) == 1
- assert children["children"][0]["node"]["name"] == "foo"
+ assert children["children"][0]["node"]["name"] == "Variant1"
assert len(children["children"][0]["children"]) == 2
assert children["children"][0]["children"][0]["node"]["name"] == "bar"
assert children["children"][0]["children"][1]["node"]["name"] == "baz"
@@ -172,7 +196,7 @@ def test_variant_manager(client: TestClient, admin_access_token: str, study_id:
res = client.post(f"/v1/studies/{variant_id}/freeze?name=bar", headers=admin_headers)
assert res.status_code == 500
- new_study_id = "newid"
+ new_study_id = "new_id"
res = client.get(f"/v1/studies/{new_study_id}", headers=admin_headers)
assert res.status_code == 404
From e6b282b6ab4e9bc03baad134a6402c22aaaeef7b Mon Sep 17 00:00:00 2001
From: hatim dinia
Date: Mon, 11 Mar 2024 10:27:18 +0100
Subject: [PATCH 09/16] fix(outputs-ui): correct weekly data formatting to
support 53-week years
(cherry picked from commit 70bd975788b46738870465f3aa1002a1a2107c1e)
---
docs/CHANGELOG.md | 1 +
.../Singlestudy/explore/Results/ResultDetails/index.tsx | 8 ++++++--
2 files changed, 7 insertions(+), 2 deletions(-)
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 89c3218b06..a64b8ad2a5 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -16,6 +16,7 @@ v2.16.8 (2024-04-19)
* **variants:** avoid recursive error when creating big variant tree [`#1967`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1967)
* **outputs:** build outputs config even when using cache [`#1958`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1958)
* **comments:** use a command to update comments on a variant [`#1959`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1959)
+* **outputs (ui):** correct weekly data formatting to support 53-week years [`#1975`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1975)
v2.16.7 (2024-03-05)
diff --git a/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx b/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx
index e1c3748b73..763405900a 100644
--- a/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx
+++ b/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx
@@ -158,11 +158,16 @@ function ResultDetails() {
return ["Annual"];
}
+ // Directly use API's week index (handles 53 weeks) as no formatting is required.
+ // !NOTE: Suboptimal: Assumes API consistency, lacks flexibility.
+ if (timestep === Timestep.Weekly) {
+ return matrixRes.data.index.map((weekNumber) => weekNumber.toString());
+ }
+
// Original date/time format mapping for moment parsing
const parseFormat = {
[Timestep.Hourly]: "MM/DD HH:mm",
[Timestep.Daily]: "MM/DD",
- [Timestep.Weekly]: "WW",
[Timestep.Monthly]: "MM",
}[timestep];
@@ -170,7 +175,6 @@ function ResultDetails() {
const outputFormat = {
[Timestep.Hourly]: "DD MMM HH:mm I",
[Timestep.Daily]: "DD MMM I",
- [Timestep.Weekly]: "WW",
[Timestep.Monthly]: "MMM",
}[timestep];
From 3c1d64eb9198ae738f68c7e5c5261965937c904f Mon Sep 17 00:00:00 2001
From: Laurent LAPORTE
Date: Thu, 28 Mar 2024 17:04:53 +0100
Subject: [PATCH 10/16] fix(configuration): add missing variables in Thematic
Trimming for studies in v8.6 or above (#1992)
---
.../business/thematic_trimming_field_infos.py | 229 +++++++++++++++
.../business/thematic_trimming_management.py | 156 +---------
antarest/study/web/study_data_blueprint.py | 2 +-
docs/CHANGELOG.md | 1 +
.../studies_blueprint/test_comments.py | 2 +-
.../studies_blueprint/test_synthesis.py | 2 +-
tests/integration/test_integration.py | 266 +++++++++++-------
tests/integration/test_integration_watcher.py | 10 +-
tests/launcher/test_web.py | 2 +-
tests/login/test_model.py | 3 -
tests/login/test_repository.py | 2 +-
tests/storage/business/test_config_manager.py | 4 +-
.../business/test_variant_study_service.py | 2 +-
.../repository/filesystem/test_lazy_node.py | 2 +-
.../variantstudy/test_snapshot_generator.py | 1 -
.../test_variant_study_service.py | 1 -
.../model/command/test_create_cluster.py | 6 +-
.../model/command/test_create_link.py | 4 +-
.../command/test_create_renewables_cluster.py | 2 +-
webapp/src/common/types.ts | 67 +----
.../dialogs/ThematicTrimmingDialog/utils.ts | 68 ++++-
21 files changed, 480 insertions(+), 352 deletions(-)
create mode 100644 antarest/study/business/thematic_trimming_field_infos.py
diff --git a/antarest/study/business/thematic_trimming_field_infos.py b/antarest/study/business/thematic_trimming_field_infos.py
new file mode 100644
index 0000000000..30d95a9393
--- /dev/null
+++ b/antarest/study/business/thematic_trimming_field_infos.py
@@ -0,0 +1,229 @@
+"""
+List of fields of the Thematic Trimming panel
+"""
+
+import typing as t
+
+from antarest.study.business.utils import AllOptionalMetaclass, FormFieldsBaseModel
+
+
+class ThematicTrimmingFormFields(FormFieldsBaseModel, metaclass=AllOptionalMetaclass, use_none=True):
+ """
+ This class manages the configuration of result filtering in a simulation.
+
+ This table allows the user to enable or disable specific variables before running a simulation.
+ """
+
+ ov_cost: bool
+ op_cost: bool
+ mrg_price: bool
+ co2_emis: bool
+ dtg_by_plant: bool
+ balance: bool
+ row_bal: bool
+ psp: bool
+ misc_ndg: bool
+ load: bool
+ h_ror: bool
+ wind: bool
+ solar: bool
+ nuclear: bool
+ lignite: bool
+ coal: bool
+ gas: bool
+ oil: bool
+ mix_fuel: bool
+ misc_dtg: bool
+ h_stor: bool
+ h_pump: bool
+ h_lev: bool
+ h_infl: bool
+ h_ovfl: bool
+ h_val: bool
+ h_cost: bool
+ unsp_enrg: bool
+ spil_enrg: bool
+ lold: bool
+ lolp: bool
+ avl_dtg: bool
+ dtg_mrg: bool
+ max_mrg: bool
+ np_cost: bool
+ np_cost_by_plant: bool
+ nodu: bool
+ nodu_by_plant: bool
+ flow_lin: bool
+ ucap_lin: bool
+ loop_flow: bool
+ flow_quad: bool
+ cong_fee_alg: bool
+ cong_fee_abs: bool
+ marg_cost: bool
+ cong_prob_plus: bool
+ cong_prob_minus: bool
+ hurdle_cost: bool
+ # since v8.1
+ res_generation_by_plant: bool
+ misc_dtg_2: bool
+ misc_dtg_3: bool
+ misc_dtg_4: bool
+ wind_offshore: bool
+ wind_onshore: bool
+ solar_concrt: bool
+ solar_pv: bool
+ solar_rooft: bool
+ renw_1: bool
+ renw_2: bool
+ renw_3: bool
+ renw_4: bool
+ # since v8.3
+ dens: bool
+ profit_by_plant: bool
+ # topic: Short-Term Storages
+ # since v8.6
+ sts_inj_by_plant: bool
+ sts_withdrawal_by_plant: bool
+ sts_lvl_by_plant: bool
+ sts_cashflow_by_cluster: bool
+ # topic: Short-Term Storages - Group
+ psp_open_injection: bool
+ psp_open_withdrawal: bool
+ psp_open_level: bool
+ psp_closed_injection: bool
+ psp_closed_withdrawal: bool
+ psp_closed_level: bool
+ pondage_injection: bool
+ pondage_withdrawal: bool
+ pondage_level: bool
+ battery_injection: bool
+ battery_withdrawal: bool
+ battery_level: bool
+ other1_injection: bool
+ other1_withdrawal: bool
+ other1_level: bool
+ other2_injection: bool
+ other2_withdrawal: bool
+ other2_level: bool
+ other3_injection: bool
+ other3_withdrawal: bool
+ other3_level: bool
+ other4_injection: bool
+ other4_withdrawal: bool
+ other4_level: bool
+ other5_injection: bool
+ other5_withdrawal: bool
+ other5_level: bool
+
+
+_GENERAL = "General"
+_SHORT_TERM_STORAGES = "Short-Term Storages"
+_SHORT_TERM_STORAGES_GROUP = "Short-Term Storages - Group"
+
+FIELDS_INFO: t.Mapping[str, t.Mapping[str, t.Any]] = {
+ # fmt: off
+ "ov_cost": {"topic": _GENERAL, "path": "OV. COST", "default_value": True},
+ "op_cost": {"topic": _GENERAL, "path": "OP. COST", "default_value": True},
+ "mrg_price": {"topic": _GENERAL, "path": "MRG. PRICE", "default_value": True},
+ "co2_emis": {"topic": _GENERAL, "path": "CO2 EMIS.", "default_value": True},
+ "dtg_by_plant": {"topic": _GENERAL, "path": "DTG by plant", "default_value": True},
+ "balance": {"topic": _GENERAL, "path": "BALANCE", "default_value": True},
+ "row_bal": {"topic": _GENERAL, "path": "ROW BAL.", "default_value": True},
+ "psp": {"topic": _GENERAL, "path": "PSP", "default_value": True},
+ "misc_ndg": {"topic": _GENERAL, "path": "MISC. NDG", "default_value": True},
+ "load": {"topic": _GENERAL, "path": "LOAD", "default_value": True},
+ "h_ror": {"topic": _GENERAL, "path": "H. ROR", "default_value": True},
+ "wind": {"topic": _GENERAL, "path": "WIND", "default_value": True},
+ "solar": {"topic": _GENERAL, "path": "SOLAR", "default_value": True},
+ "nuclear": {"topic": _GENERAL, "path": "NUCLEAR", "default_value": True},
+ "lignite": {"topic": _GENERAL, "path": "LIGNITE", "default_value": True},
+ "coal": {"topic": _GENERAL, "path": "COAL", "default_value": True},
+ "gas": {"topic": _GENERAL, "path": "GAS", "default_value": True},
+ "oil": {"topic": _GENERAL, "path": "OIL", "default_value": True},
+ "mix_fuel": {"topic": _GENERAL, "path": "MIX. FUEL", "default_value": True},
+ "misc_dtg": {"topic": _GENERAL, "path": "MISC. DTG", "default_value": True},
+ "h_stor": {"topic": _GENERAL, "path": "H. STOR", "default_value": True},
+ "h_pump": {"topic": _GENERAL, "path": "H. PUMP", "default_value": True},
+ "h_lev": {"topic": _GENERAL, "path": "H. LEV", "default_value": True},
+ "h_infl": {"topic": _GENERAL, "path": "H. INFL", "default_value": True},
+ "h_ovfl": {"topic": _GENERAL, "path": "H. OVFL", "default_value": True},
+ "h_val": {"topic": _GENERAL, "path": "H. VAL", "default_value": True},
+ "h_cost": {"topic": _GENERAL, "path": "H. COST", "default_value": True},
+ "unsp_enrg": {"topic": _GENERAL, "path": "UNSP. ENRG", "default_value": True},
+ "spil_enrg": {"topic": _GENERAL, "path": "SPIL. ENRG", "default_value": True},
+ "lold": {"topic": _GENERAL, "path": "LOLD", "default_value": True},
+ "lolp": {"topic": _GENERAL, "path": "LOLP", "default_value": True},
+ "avl_dtg": {"topic": _GENERAL, "path": "AVL DTG", "default_value": True},
+ "dtg_mrg": {"topic": _GENERAL, "path": "DTG MRG", "default_value": True},
+ "max_mrg": {"topic": _GENERAL, "path": "MAX MRG", "default_value": True},
+ "np_cost": {"topic": _GENERAL, "path": "NP COST", "default_value": True},
+ "np_cost_by_plant": {"topic": _GENERAL, "path": "NP Cost by plant", "default_value": True},
+ "nodu": {"topic": _GENERAL, "path": "NODU", "default_value": True},
+ "nodu_by_plant": {"topic": _GENERAL, "path": "NODU by plant", "default_value": True},
+ "flow_lin": {"topic": _GENERAL, "path": "FLOW LIN.", "default_value": True},
+ "ucap_lin": {"topic": _GENERAL, "path": "UCAP LIN.", "default_value": True},
+ "loop_flow": {"topic": _GENERAL, "path": "LOOP FLOW", "default_value": True},
+ "flow_quad": {"topic": _GENERAL, "path": "FLOW QUAD.", "default_value": True},
+ "cong_fee_alg": {"topic": _GENERAL, "path": "CONG. FEE (ALG.)", "default_value": True},
+ "cong_fee_abs": {"topic": _GENERAL, "path": "CONG. FEE (ABS.)", "default_value": True},
+ "marg_cost": {"topic": _GENERAL, "path": "MARG. COST", "default_value": True},
+ "cong_prob_plus": {"topic": _GENERAL, "path": "CONG. PROB +", "default_value": True},
+ "cong_prob_minus": {"topic": _GENERAL, "path": "CONG. PROB -", "default_value": True},
+ "hurdle_cost": {"topic": _GENERAL, "path": "HURDLE COST", "default_value": True},
+ # since v8.1
+ "res_generation_by_plant": {"topic": _GENERAL, "path": "RES generation by plant", "default_value": True, "start_version": 810},
+ "misc_dtg_2": {"topic": _GENERAL, "path": "MISC. DTG 2", "default_value": True, "start_version": 810},
+ "misc_dtg_3": {"topic": _GENERAL, "path": "MISC. DTG 3", "default_value": True, "start_version": 810},
+ "misc_dtg_4": {"topic": _GENERAL, "path": "MISC. DTG 4", "default_value": True, "start_version": 810},
+ "wind_offshore": {"topic": _GENERAL, "path": "WIND OFFSHORE", "default_value": True, "start_version": 810},
+ "wind_onshore": {"topic": _GENERAL, "path": "WIND ONSHORE", "default_value": True, "start_version": 810},
+ "solar_concrt": {"topic": _GENERAL, "path": "SOLAR CONCRT.", "default_value": True, "start_version": 810},
+ "solar_pv": {"topic": _GENERAL, "path": "SOLAR PV", "default_value": True, "start_version": 810},
+ "solar_rooft": {"topic": _GENERAL, "path": "SOLAR ROOFT", "default_value": True, "start_version": 810},
+ "renw_1": {"topic": _GENERAL, "path": "RENW. 1", "default_value": True, "start_version": 810},
+ "renw_2": {"topic": _GENERAL, "path": "RENW. 2", "default_value": True, "start_version": 810},
+ "renw_3": {"topic": _GENERAL, "path": "RENW. 3", "default_value": True, "start_version": 810},
+ "renw_4": {"topic": _GENERAL, "path": "RENW. 4", "default_value": True, "start_version": 810},
+ # since v8.3
+ "dens": {"topic": _GENERAL, "path": "DENS", "default_value": True, "start_version": 830},
+ "profit_by_plant": {"topic": _GENERAL, "path": "Profit by plant", "default_value": True, "start_version": 830},
+ # topic: "Short-Term Storages"
+ # since v8.6
+ "sts_inj_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS inj by plant", "default_value": True, "start_version": 860},
+ "sts_withdrawal_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS withdrawal by plant", "default_value": True, "start_version": 860},
+ "sts_lvl_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS lvl by plant", "default_value": True, "start_version": 860},
+ "sts_cashflow_by_cluster": {"topic": _SHORT_TERM_STORAGES, "path": "STS Cashflow By Cluster", "default_value": True, "start_version": 860},
+ # topic: "Short-Term Storages - Group"
+ "psp_open_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_open_injection", "default_value": True, "start_version": 860},
+ "psp_open_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_open_withdrawal", "default_value": True, "start_version": 860},
+ "psp_open_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_open_level", "default_value": True, "start_version": 860},
+ "psp_closed_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_closed_injection", "default_value": True, "start_version": 860},
+ "psp_closed_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_closed_withdrawal", "default_value": True, "start_version": 860},
+ "psp_closed_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_closed_level", "default_value": True, "start_version": 860},
+ "pondage_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Pondage_injection", "default_value": True, "start_version": 860},
+ "pondage_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Pondage_withdrawal", "default_value": True, "start_version": 860},
+ "pondage_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Pondage_level", "default_value": True, "start_version": 860},
+ "battery_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Battery_injection", "default_value": True, "start_version": 860},
+ "battery_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Battery_withdrawal", "default_value": True, "start_version": 860},
+ "battery_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Battery_level", "default_value": True, "start_version": 860},
+ "other1_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other1_injection", "default_value": True, "start_version": 860},
+ "other1_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other1_withdrawal", "default_value": True, "start_version": 860},
+ "other1_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other1_level", "default_value": True, "start_version": 860},
+ "other2_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other2_injection", "default_value": True, "start_version": 860},
+ "other2_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other2_withdrawal", "default_value": True, "start_version": 860},
+ "other2_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other2_level", "default_value": True, "start_version": 860},
+ "other3_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other3_injection", "default_value": True, "start_version": 860},
+ "other3_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other3_withdrawal", "default_value": True, "start_version": 860},
+ "other3_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other3_level", "default_value": True, "start_version": 860},
+ "other4_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other4_injection", "default_value": True, "start_version": 860},
+ "other4_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other4_withdrawal", "default_value": True, "start_version": 860},
+ "other4_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other4_level", "default_value": True, "start_version": 860},
+ "other5_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other5_injection", "default_value": True, "start_version": 860},
+ "other5_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other5_withdrawal", "default_value": True, "start_version": 860},
+ "other5_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other5_level", "default_value": True, "start_version": 860},
+ # fmt: on
+}
+
+
+def get_fields_info(study_version: t.Union[str, int]) -> t.Mapping[str, t.Mapping[str, t.Any]]:
+ study_version = int(study_version)
+ return {key: info for key, info in FIELDS_INFO.items() if (info.get("start_version") or 0) <= study_version}
diff --git a/antarest/study/business/thematic_trimming_management.py b/antarest/study/business/thematic_trimming_management.py
index 1ebfeebe04..d4af9f960e 100644
--- a/antarest/study/business/thematic_trimming_management.py
+++ b/antarest/study/business/thematic_trimming_management.py
@@ -1,162 +1,12 @@
import typing as t
-from antarest.study.business.utils import (
- GENERAL_DATA_PATH,
- AllOptionalMetaclass,
- FieldInfo,
- FormFieldsBaseModel,
- execute_or_add_commands,
-)
+from antarest.study.business.thematic_trimming_field_infos import ThematicTrimmingFormFields, get_fields_info
+from antarest.study.business.utils import GENERAL_DATA_PATH, execute_or_add_commands
from antarest.study.model import Study
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig
-class ThematicTrimmingFormFields(FormFieldsBaseModel, metaclass=AllOptionalMetaclass, use_none=True):
- """
- This class manages the configuration of result filtering in a simulation.
-
- This table allows the user to enable or disable specific variables before running a simulation.
- """
-
- ov_cost: bool
- op_cost: bool
- mrg_price: bool
- co2_emis: bool
- dtg_by_plant: bool
- balance: bool
- row_bal: bool
- psp: bool
- misc_ndg: bool
- load: bool
- h_ror: bool
- wind: bool
- solar: bool
- nuclear: bool
- lignite: bool
- coal: bool
- gas: bool
- oil: bool
- mix_fuel: bool
- misc_dtg: bool
- h_stor: bool
- h_pump: bool
- h_lev: bool
- h_infl: bool
- h_ovfl: bool
- h_val: bool
- h_cost: bool
- unsp_enrg: bool
- spil_enrg: bool
- lold: bool
- lolp: bool
- avl_dtg: bool
- dtg_mrg: bool
- max_mrg: bool
- np_cost: bool
- np_cost_by_plant: bool
- nodu: bool
- nodu_by_plant: bool
- flow_lin: bool
- ucap_lin: bool
- loop_flow: bool
- flow_quad: bool
- cong_fee_alg: bool
- cong_fee_abs: bool
- marg_cost: bool
- cong_prob_plus: bool
- cong_prob_minus: bool
- hurdle_cost: bool
- # For study versions >= 810
- res_generation_by_plant: bool
- misc_dtg_2: bool
- misc_dtg_3: bool
- misc_dtg_4: bool
- wind_offshore: bool
- wind_onshore: bool
- solar_concrt: bool
- solar_pv: bool
- solar_rooft: bool
- renw_1: bool
- renw_2: bool
- renw_3: bool
- renw_4: bool
- # For study versions >= 830
- dens: bool
- profit_by_plant: bool
-
-
-FIELDS_INFO: t.Dict[str, FieldInfo] = {
- "ov_cost": {"path": "OV. COST", "default_value": True},
- "op_cost": {"path": "OP. COST", "default_value": True},
- "mrg_price": {"path": "MRG. PRICE", "default_value": True},
- "co2_emis": {"path": "CO2 EMIS.", "default_value": True},
- "dtg_by_plant": {"path": "DTG by plant", "default_value": True},
- "balance": {"path": "BALANCE", "default_value": True},
- "row_bal": {"path": "ROW BAL.", "default_value": True},
- "psp": {"path": "PSP", "default_value": True},
- "misc_ndg": {"path": "MISC. NDG", "default_value": True},
- "load": {"path": "LOAD", "default_value": True},
- "h_ror": {"path": "H. ROR", "default_value": True},
- "wind": {"path": "WIND", "default_value": True},
- "solar": {"path": "SOLAR", "default_value": True},
- "nuclear": {"path": "NUCLEAR", "default_value": True},
- "lignite": {"path": "LIGNITE", "default_value": True},
- "coal": {"path": "COAL", "default_value": True},
- "gas": {"path": "GAS", "default_value": True},
- "oil": {"path": "OIL", "default_value": True},
- "mix_fuel": {"path": "MIX. FUEL", "default_value": True},
- "misc_dtg": {"path": "MISC. DTG", "default_value": True},
- "h_stor": {"path": "H. STOR", "default_value": True},
- "h_pump": {"path": "H. PUMP", "default_value": True},
- "h_lev": {"path": "H. LEV", "default_value": True},
- "h_infl": {"path": "H. INFL", "default_value": True},
- "h_ovfl": {"path": "H. OVFL", "default_value": True},
- "h_val": {"path": "H. VAL", "default_value": True},
- "h_cost": {"path": "H. COST", "default_value": True},
- "unsp_enrg": {"path": "UNSP. ENRG", "default_value": True},
- "spil_enrg": {"path": "SPIL. ENRG", "default_value": True},
- "lold": {"path": "LOLD", "default_value": True},
- "lolp": {"path": "LOLP", "default_value": True},
- "avl_dtg": {"path": "AVL DTG", "default_value": True},
- "dtg_mrg": {"path": "DTG MRG", "default_value": True},
- "max_mrg": {"path": "MAX MRG", "default_value": True},
- "np_cost": {"path": "NP COST", "default_value": True},
- "np_cost_by_plant": {"path": "NP Cost by plant", "default_value": True},
- "nodu": {"path": "NODU", "default_value": True},
- "nodu_by_plant": {"path": "NODU by plant", "default_value": True},
- "flow_lin": {"path": "FLOW LIN.", "default_value": True},
- "ucap_lin": {"path": "UCAP LIN.", "default_value": True},
- "loop_flow": {"path": "LOOP FLOW", "default_value": True},
- "flow_quad": {"path": "FLOW QUAD.", "default_value": True},
- "cong_fee_alg": {"path": "CONG. FEE (ALG.)", "default_value": True},
- "cong_fee_abs": {"path": "CONG. FEE (ABS.)", "default_value": True},
- "marg_cost": {"path": "MARG. COST", "default_value": True},
- "cong_prob_plus": {"path": "CONG. PROB +", "default_value": True},
- "cong_prob_minus": {"path": "CONG. PROB -", "default_value": True},
- "hurdle_cost": {"path": "HURDLE COST", "default_value": True},
- "res_generation_by_plant": {"path": "RES generation by plant", "default_value": True, "start_version": 810},
- "misc_dtg_2": {"path": "MISC. DTG 2", "default_value": True, "start_version": 810},
- "misc_dtg_3": {"path": "MISC. DTG 3", "default_value": True, "start_version": 810},
- "misc_dtg_4": {"path": "MISC. DTG 4", "default_value": True, "start_version": 810},
- "wind_offshore": {"path": "WIND OFFSHORE", "default_value": True, "start_version": 810},
- "wind_onshore": {"path": "WIND ONSHORE", "default_value": True, "start_version": 810},
- "solar_concrt": {"path": "SOLAR CONCRT.", "default_value": True, "start_version": 810},
- "solar_pv": {"path": "SOLAR PV", "default_value": True, "start_version": 810},
- "solar_rooft": {"path": "SOLAR ROOFT", "default_value": True, "start_version": 810},
- "renw_1": {"path": "RENW. 1", "default_value": True, "start_version": 810},
- "renw_2": {"path": "RENW. 2", "default_value": True, "start_version": 810},
- "renw_3": {"path": "RENW. 3", "default_value": True, "start_version": 810},
- "renw_4": {"path": "RENW. 4", "default_value": True, "start_version": 810},
- "dens": {"path": "DENS", "default_value": True, "start_version": 830},
- "profit_by_plant": {"path": "Profit by plant", "default_value": True, "start_version": 830},
-}
-
-
-def get_fields_info(study_version: int) -> t.Mapping[str, FieldInfo]:
- return {key: info for key, info in FIELDS_INFO.items() if (info.get("start_version") or -1) <= study_version}
-
-
class ThematicTrimmingManager:
def __init__(self, storage_service: StudyStorageService) -> None:
self.storage_service = storage_service
@@ -172,7 +22,7 @@ def get_field_values(self, study: Study) -> ThematicTrimmingFormFields:
include_vars = trimming_config.get("select_var +") or []
selected_vars_reset = trimming_config.get("selected_vars_reset", True)
- def get_value(field_info: FieldInfo) -> t.Any:
+ def get_value(field_info: t.Mapping[str, t.Any]) -> t.Any:
if selected_vars_reset is None:
return field_info["default_value"]
var_name = field_info["path"]
diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py
index bc667f45d5..ddc38ef60d 100644
--- a/antarest/study/web/study_data_blueprint.py
+++ b/antarest/study/web/study_data_blueprint.py
@@ -53,7 +53,7 @@
from antarest.study.business.optimization_management import OptimizationFormFields
from antarest.study.business.playlist_management import PlaylistColumns
from antarest.study.business.table_mode_management import ColumnsModelTypes, TableTemplateType
-from antarest.study.business.thematic_trimming_management import ThematicTrimmingFormFields
+from antarest.study.business.thematic_trimming_field_infos import ThematicTrimmingFormFields
from antarest.study.business.timeseries_config_management import TSFormFields
from antarest.study.model import PatchArea, PatchCluster
from antarest.study.service import StudyService
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index a64b8ad2a5..63cce3100b 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -17,6 +17,7 @@ v2.16.8 (2024-04-19)
* **outputs:** build outputs config even when using cache [`#1958`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1958)
* **comments:** use a command to update comments on a variant [`#1959`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1959)
* **outputs (ui):** correct weekly data formatting to support 53-week years [`#1975`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1975)
+* **configuration:** add missing variables in Thematic Trimming for studies in version v8.6 or above [`#1992`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1992)
v2.16.7 (2024-03-05)
diff --git a/tests/integration/studies_blueprint/test_comments.py b/tests/integration/studies_blueprint/test_comments.py
index b282ed8781..378be0aed5 100644
--- a/tests/integration/studies_blueprint/test_comments.py
+++ b/tests/integration/studies_blueprint/test_comments.py
@@ -88,7 +88,7 @@ def test_variant_study(
res = client.post(
f"/v1/studies/{base_study_id}/variants",
headers={"Authorization": f"Bearer {user_access_token}"},
- params={"name": f"Variant XYZ"},
+ params={"name": "Variant XYZ"},
)
assert res.status_code == 200, res.json() # should be CREATED
variant_id = res.json()
diff --git a/tests/integration/studies_blueprint/test_synthesis.py b/tests/integration/studies_blueprint/test_synthesis.py
index aa6141e782..982cc16773 100644
--- a/tests/integration/studies_blueprint/test_synthesis.py
+++ b/tests/integration/studies_blueprint/test_synthesis.py
@@ -84,7 +84,7 @@ def test_variant_study(
res = client.post(
f"/v1/studies/{base_study_id}/variants",
headers={"Authorization": f"Bearer {user_access_token}"},
- params={"name": f"Variant XYZ"},
+ params={"name": "Variant XYZ"},
)
assert res.status_code == 200, res.json() # should be CREATED
variant_id = res.json()
diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py
index 8065039c92..d9a03481ae 100644
--- a/tests/integration/test_integration.py
+++ b/tests/integration/test_integration.py
@@ -879,74 +879,103 @@ def test_area_management(client: TestClient, admin_access_token: str, study_id:
# Thematic trimming form
- res_thematic_trimming_config = client.get(
- f"/v1/studies/{study_id}/config/thematictrimming/form", headers=admin_headers
- )
- res_thematic_trimming_config_json = res_thematic_trimming_config.json()
- assert res_thematic_trimming_config_json == {
- "ovCost": True,
- "opCost": True,
- "mrgPrice": True,
- "co2Emis": True,
- "dtgByPlant": True,
+ res = client.get(f"/v1/studies/{study_id}/config/thematictrimming/form", headers=admin_headers)
+ obj = res.json()
+ assert obj == {
+ "avlDtg": True,
"balance": True,
- "rowBal": True,
- "psp": True,
- "miscNdg": True,
- "load": True,
- "hRor": True,
- "wind": True,
- "solar": True,
- "nuclear": True,
- "lignite": True,
+ "batteryInjection": True,
+ "batteryLevel": True,
+ "batteryWithdrawal": True,
+ "co2Emis": True,
"coal": True,
+ "congFeeAbs": True,
+ "congFeeAlg": True,
+ "congProbMinus": True,
+ "congProbPlus": True,
+ "dens": True,
+ "dtgByPlant": True,
+ "dtgMrg": True,
+ "flowLin": True,
+ "flowQuad": True,
"gas": True,
- "oil": True,
- "mixFuel": True,
- "miscDtg": True,
- "hStor": True,
- "hPump": True,
- "hLev": True,
+ "hCost": True,
"hInfl": True,
+ "hLev": True,
"hOvfl": True,
+ "hPump": True,
+ "hRor": True,
+ "hStor": True,
"hVal": True,
- "hCost": True,
- "unspEnrg": True,
- "spilEnrg": True,
+ "hurdleCost": True,
+ "lignite": True,
+ "load": True,
"lold": True,
"lolp": True,
- "avlDtg": True,
- "dtgMrg": True,
- "maxMrg": True,
- "npCost": True,
- "npCostByPlant": True,
- "nodu": True,
- "noduByPlant": True,
- "flowLin": True,
- "ucapLin": True,
"loopFlow": True,
- "flowQuad": True,
- "congFeeAlg": True,
- "congFeeAbs": True,
"margCost": True,
- "congProbPlus": True,
- "congProbMinus": True,
- "hurdleCost": True,
- "resGenerationByPlant": True,
+ "maxMrg": True,
+ "miscDtg": True,
"miscDtg2": True,
"miscDtg3": True,
"miscDtg4": True,
- "windOffshore": True,
- "windOnshore": True,
- "solarConcrt": True,
- "solarPv": True,
- "solarRooft": True,
+ "miscNdg": True,
+ "mixFuel": True,
+ "mrgPrice": True,
+ "nodu": True,
+ "noduByPlant": True,
+ "npCost": True,
+ "npCostByPlant": True,
+ "nuclear": True,
+ "oil": True,
+ "opCost": True,
+ "other1Injection": True,
+ "other1Level": True,
+ "other1Withdrawal": True,
+ "other2Injection": True,
+ "other2Level": True,
+ "other2Withdrawal": True,
+ "other3Injection": True,
+ "other3Level": True,
+ "other3Withdrawal": True,
+ "other4Injection": True,
+ "other4Level": True,
+ "other4Withdrawal": True,
+ "other5Injection": True,
+ "other5Level": True,
+ "other5Withdrawal": True,
+ "ovCost": True,
+ "pondageInjection": True,
+ "pondageLevel": True,
+ "pondageWithdrawal": True,
+ "profitByPlant": True,
+ "psp": True,
+ "pspClosedInjection": True,
+ "pspClosedLevel": True,
+ "pspClosedWithdrawal": True,
+ "pspOpenInjection": True,
+ "pspOpenLevel": True,
+ "pspOpenWithdrawal": True,
"renw1": True,
"renw2": True,
"renw3": True,
"renw4": True,
- "dens": True,
- "profitByPlant": True,
+ "resGenerationByPlant": True,
+ "rowBal": True,
+ "solar": True,
+ "solarConcrt": True,
+ "solarPv": True,
+ "solarRooft": True,
+ "spilEnrg": True,
+ "stsCashflowByCluster": True,
+ "stsInjByPlant": True,
+ "stsLvlByPlant": True,
+ "stsWithdrawalByPlant": True,
+ "ucapLin": True,
+ "unspEnrg": True,
+ "wind": True,
+ "windOffshore": True,
+ "windOnshore": True,
}
client.put(
@@ -1018,74 +1047,103 @@ def test_area_management(client: TestClient, admin_access_token: str, study_id:
"profitByPlant": True,
},
)
- res_thematic_trimming_config = client.get(
- f"/v1/studies/{study_id}/config/thematictrimming/form", headers=admin_headers
- )
- res_thematic_trimming_config_json = res_thematic_trimming_config.json()
- assert res_thematic_trimming_config_json == {
- "ovCost": False,
- "opCost": True,
- "mrgPrice": True,
- "co2Emis": True,
- "dtgByPlant": True,
+ res = client.get(f"/v1/studies/{study_id}/config/thematictrimming/form", headers=admin_headers)
+ obj = res.json()
+ assert obj == {
+ "avlDtg": True,
"balance": True,
- "rowBal": True,
- "psp": True,
- "miscNdg": True,
- "load": True,
- "hRor": True,
- "wind": True,
- "solar": True,
- "nuclear": True,
- "lignite": True,
+ "batteryInjection": True,
+ "batteryLevel": True,
+ "batteryWithdrawal": True,
+ "co2Emis": True,
"coal": True,
+ "congFeeAbs": True,
+ "congFeeAlg": True,
+ "congProbMinus": True,
+ "congProbPlus": True,
+ "dens": True,
+ "dtgByPlant": True,
+ "dtgMrg": True,
+ "flowLin": True,
+ "flowQuad": True,
"gas": True,
- "oil": True,
- "mixFuel": True,
- "miscDtg": True,
- "hStor": True,
- "hPump": True,
- "hLev": True,
+ "hCost": True,
"hInfl": True,
+ "hLev": True,
"hOvfl": True,
- "hVal": False,
- "hCost": True,
- "unspEnrg": True,
- "spilEnrg": True,
+ "hPump": True,
+ "hRor": True,
+ "hStor": True,
+ "hVal": True,
+ "hurdleCost": True,
+ "lignite": True,
+ "load": True,
"lold": True,
"lolp": True,
- "avlDtg": True,
- "dtgMrg": True,
- "maxMrg": True,
- "npCost": True,
- "npCostByPlant": True,
- "nodu": True,
- "noduByPlant": True,
- "flowLin": True,
- "ucapLin": True,
"loopFlow": True,
- "flowQuad": True,
- "congFeeAlg": True,
- "congFeeAbs": True,
"margCost": True,
- "congProbPlus": True,
- "congProbMinus": True,
- "hurdleCost": True,
- "resGenerationByPlant": True,
+ "maxMrg": True,
+ "miscDtg": True,
"miscDtg2": True,
"miscDtg3": True,
"miscDtg4": True,
- "windOffshore": True,
- "windOnshore": True,
- "solarConcrt": True,
- "solarPv": True,
- "solarRooft": True,
+ "miscNdg": True,
+ "mixFuel": True,
+ "mrgPrice": True,
+ "nodu": True,
+ "noduByPlant": True,
+ "npCost": True,
+ "npCostByPlant": True,
+ "nuclear": True,
+ "oil": True,
+ "opCost": True,
+ "other1Injection": True,
+ "other1Level": True,
+ "other1Withdrawal": True,
+ "other2Injection": True,
+ "other2Level": True,
+ "other2Withdrawal": True,
+ "other3Injection": True,
+ "other3Level": True,
+ "other3Withdrawal": True,
+ "other4Injection": True,
+ "other4Level": True,
+ "other4Withdrawal": True,
+ "other5Injection": True,
+ "other5Level": True,
+ "other5Withdrawal": True,
+ "ovCost": True,
+ "pondageInjection": True,
+ "pondageLevel": True,
+ "pondageWithdrawal": True,
+ "profitByPlant": True,
+ "psp": True,
+ "pspClosedInjection": True,
+ "pspClosedLevel": True,
+ "pspClosedWithdrawal": True,
+ "pspOpenInjection": True,
+ "pspOpenLevel": True,
+ "pspOpenWithdrawal": True,
"renw1": True,
- "renw2": False,
+ "renw2": True,
"renw3": True,
"renw4": True,
- "dens": True,
- "profitByPlant": True,
+ "resGenerationByPlant": True,
+ "rowBal": True,
+ "solar": True,
+ "solarConcrt": True,
+ "solarPv": True,
+ "solarRooft": True,
+ "spilEnrg": True,
+ "stsCashflowByCluster": True,
+ "stsInjByPlant": True,
+ "stsLvlByPlant": True,
+ "stsWithdrawalByPlant": True,
+ "ucapLin": True,
+ "unspEnrg": True,
+ "wind": True,
+ "windOffshore": True,
+ "windOnshore": True,
}
# Properties form
diff --git a/tests/integration/test_integration_watcher.py b/tests/integration/test_integration_watcher.py
index 0ac0e3afd0..e151a3ade3 100644
--- a/tests/integration/test_integration_watcher.py
+++ b/tests/integration/test_integration_watcher.py
@@ -8,11 +8,5 @@ def test_integration_xpansion(app: FastAPI, tmp_path: str):
admin_credentials = res.json()
headers = {"Authorization": f'Bearer {admin_credentials["access_token"]}'}
- client.post(
- f"/v1/watcher/_scan",
- headers=headers,
- )
- client.post(
- f"/v1/watcher/_scan?path=/tmp",
- headers=headers,
- )
+ client.post("/v1/watcher/_scan", headers=headers)
+ client.post("/v1/watcher/_scan?path=/tmp", headers=headers)
diff --git a/tests/launcher/test_web.py b/tests/launcher/test_web.py
index 99799abbde..e0800cf019 100644
--- a/tests/launcher/test_web.py
+++ b/tests/launcher/test_web.py
@@ -100,7 +100,7 @@ def test_jobs() -> None:
assert res.status_code == 200
assert [JobResultDTO.parse_obj(j) for j in res.json()] == [result.to_dto()]
- res = client.get(f"/v1/launcher/jobs")
+ res = client.get("/v1/launcher/jobs")
assert res.status_code == 200
assert [JobResultDTO.parse_obj(j) for j in res.json()] == [result.to_dto()]
service.get_jobs.assert_has_calls(
diff --git a/tests/login/test_model.py b/tests/login/test_model.py
index 2dee1d994e..e1ef0bc928 100644
--- a/tests/login/test_model.py
+++ b/tests/login/test_model.py
@@ -1,7 +1,4 @@
-import contextlib
-
from sqlalchemy.engine.base import Engine # type: ignore
-from sqlalchemy.exc import IntegrityError # type: ignore
from sqlalchemy.orm import sessionmaker # type: ignore
from antarest.login.model import (
diff --git a/tests/login/test_repository.py b/tests/login/test_repository.py
index 60bdbc0dbf..5ab7406dc4 100644
--- a/tests/login/test_repository.py
+++ b/tests/login/test_repository.py
@@ -1,5 +1,5 @@
import pytest
-from sqlalchemy.orm import Session, scoped_session, sessionmaker # type: ignore
+from sqlalchemy.orm import Session # type: ignore
from antarest.login.model import Bot, Group, Password, Role, RoleType, User, UserLdap
from antarest.login.repository import BotRepository, GroupRepository, RoleRepository, UserLdapRepository, UserRepository
diff --git a/tests/storage/business/test_config_manager.py b/tests/storage/business/test_config_manager.py
index 2e20aae081..f4d344a27d 100644
--- a/tests/storage/business/test_config_manager.py
+++ b/tests/storage/business/test_config_manager.py
@@ -1,8 +1,8 @@
from pathlib import Path
from unittest.mock import Mock
+from antarest.study.business.thematic_trimming_field_infos import FIELDS_INFO
from antarest.study.business.thematic_trimming_management import (
- FIELDS_INFO,
ThematicTrimmingFormFields,
ThematicTrimmingManager,
get_fields_info,
@@ -112,4 +112,4 @@ def test_thematic_trimming_config() -> None:
)
)
- assert len(FIELDS_INFO) == 63
+ assert len(FIELDS_INFO) == 94
diff --git a/tests/storage/business/test_variant_study_service.py b/tests/storage/business/test_variant_study_service.py
index 8c6ac3602c..7c6e00f99c 100644
--- a/tests/storage/business/test_variant_study_service.py
+++ b/tests/storage/business/test_variant_study_service.py
@@ -92,7 +92,7 @@ def task_status(*args):
yield t
study_service.task_service.status_task.side_effect = task_status()
- with pytest.raises(VariantGenerationError, match=f"Error while generating study2.py"):
+ with pytest.raises(VariantGenerationError, match="Error while generating study2.py"):
study_service.get(metadata=metadata, url=sub_route, depth=2)
study_service.task_service.await_task.assert_called()
diff --git a/tests/storage/repository/filesystem/test_lazy_node.py b/tests/storage/repository/filesystem/test_lazy_node.py
index e0e8e91e5a..f899d32fa3 100644
--- a/tests/storage/repository/filesystem/test_lazy_node.py
+++ b/tests/storage/repository/filesystem/test_lazy_node.py
@@ -112,7 +112,7 @@ def test_save_uri(tmp_path: Path):
context = ContextServer(matrix=Mock(), resolver=resolver)
node = MockLazyNode(context=context, config=config)
- uri = f"matrix://id"
+ uri = "matrix://id"
node.save(uri)
assert (file.parent / f"{file.name}.link").read_text() == uri
assert not file.exists()
diff --git a/tests/study/storage/variantstudy/test_snapshot_generator.py b/tests/study/storage/variantstudy/test_snapshot_generator.py
index 5e90b6ee06..2365049432 100644
--- a/tests/study/storage/variantstudy/test_snapshot_generator.py
+++ b/tests/study/storage/variantstudy/test_snapshot_generator.py
@@ -10,7 +10,6 @@
import numpy as np
import pytest
-from sqlalchemy import event # type: ignore
from antarest.core.exceptions import VariantGenerationError
from antarest.core.interfaces.cache import CacheConstants
diff --git a/tests/study/storage/variantstudy/test_variant_study_service.py b/tests/study/storage/variantstudy/test_variant_study_service.py
index 25317a9589..9dce83e735 100644
--- a/tests/study/storage/variantstudy/test_variant_study_service.py
+++ b/tests/study/storage/variantstudy/test_variant_study_service.py
@@ -5,7 +5,6 @@
import numpy as np
import pytest
-from sqlalchemy import create_engine # type: ignore
from antarest.core.model import PublicMode
from antarest.core.requests import RequestParameters
diff --git a/tests/variantstudy/model/command/test_create_cluster.py b/tests/variantstudy/model/command/test_create_cluster.py
index 4fdeb3c488..6554bbe6c2 100644
--- a/tests/variantstudy/model/command/test_create_cluster.py
+++ b/tests/variantstudy/model/command/test_create_cluster.py
@@ -247,17 +247,17 @@ def test_create_diff(command_context: CommandContext):
assert base.create_diff(other_match) == [
ReplaceMatrix(
- target=f"input/thermal/prepro/foo/foo/data",
+ target="input/thermal/prepro/foo/foo/data",
matrix=prepro_b,
command_context=command_context,
),
ReplaceMatrix(
- target=f"input/thermal/prepro/foo/foo/modulation",
+ target="input/thermal/prepro/foo/foo/modulation",
matrix=modulation_b,
command_context=command_context,
),
UpdateConfig(
- target=f"input/thermal/clusters/foo/list/foo",
+ target="input/thermal/clusters/foo/list/foo",
data={"nominalcapacity": "2400"},
command_context=command_context,
),
diff --git a/tests/variantstudy/model/command/test_create_link.py b/tests/variantstudy/model/command/test_create_link.py
index 413e97038d..b2e8715ae3 100644
--- a/tests/variantstudy/model/command/test_create_link.py
+++ b/tests/variantstudy/model/command/test_create_link.py
@@ -254,12 +254,12 @@ def test_create_diff(command_context: CommandContext):
assert base.create_diff(other_match) == [
UpdateConfig(
- target=f"input/links/bar/properties/foo",
+ target="input/links/bar/properties/foo",
data=CreateLink.generate_link_properties({"hurdles-cost": "true"}),
command_context=command_context,
),
ReplaceMatrix(
- target=f"@links_series/bar/foo",
+ target="@links_series/bar/foo",
matrix=series_b,
command_context=command_context,
),
diff --git a/tests/variantstudy/model/command/test_create_renewables_cluster.py b/tests/variantstudy/model/command/test_create_renewables_cluster.py
index fc6ac91afe..ecec2fd882 100644
--- a/tests/variantstudy/model/command/test_create_renewables_cluster.py
+++ b/tests/variantstudy/model/command/test_create_renewables_cluster.py
@@ -190,7 +190,7 @@ def test_create_diff(command_context: CommandContext):
)
assert base.create_diff(other_match) == [
UpdateConfig(
- target=f"input/renewables/clusters/foo/list/foo",
+ target="input/renewables/clusters/foo/list/foo",
data={"a": "b"},
command_context=command_context,
),
diff --git a/webapp/src/common/types.ts b/webapp/src/common/types.ts
index 509ac4c4ff..7e62f614e4 100644
--- a/webapp/src/common/types.ts
+++ b/webapp/src/common/types.ts
@@ -414,6 +414,7 @@ export interface Area {
filters_synthesis: string[];
filters_year: string[];
}
+
export interface Set {
name?: string;
inverted_set: boolean;
@@ -447,6 +448,7 @@ export interface FileStudyTreeConfigDTO {
archive_input_series: string[];
enr_modelling: string;
}
+
export interface LinkElement {
id: string;
label: string;
@@ -643,68 +645,3 @@ export interface TaskView {
type: TaskType;
status: string;
}
-
-export interface ThematicTrimmingConfigDTO {
- "OV. COST": boolean;
- "OP. COST": boolean;
- "MRG. PRICE": boolean;
- "CO2 EMIS.": boolean;
- "DTG by plant": boolean;
- BALANCE: boolean;
- "ROW BAL.": boolean;
- PSP: boolean;
- "MISC. NDG": boolean;
- LOAD: boolean;
- "H. ROR": boolean;
- WIND: boolean;
- SOLAR: boolean;
- NUCLEAR: boolean;
- LIGNITE: boolean;
- COAL: boolean;
- GAS: boolean;
- OIL: boolean;
- "MIX. FUEL": boolean;
- "MISC. DTG": boolean;
- "H. STOR": boolean;
- "H. PUMP": boolean;
- "H. LEV": boolean;
- "H. INFL": boolean;
- "H. OVFL": boolean;
- "H. VAL": boolean;
- "H. COST": boolean;
- "UNSP. ENRG": boolean;
- "SPIL. ENRG": boolean;
- LOLD: boolean;
- LOLP: boolean;
- "AVL DTG": boolean;
- "DTG MRG": boolean;
- "MAX MRG": boolean;
- "NP COST": boolean;
- "NP Cost by plant": boolean;
- NODU: boolean;
- "NODU by plant": boolean;
- "FLOW LIN.": boolean;
- "UCAP LIN.": boolean;
- "LOOP FLOW": boolean;
- "FLOW QUAD.": boolean;
- "CONG. FEE (ALG.)": boolean;
- "CONG. FEE (ABS.)": boolean;
- "MARG. COST": boolean;
- "CONG. PROB +": boolean;
- "CONG. PROB -": boolean;
- "HURDLE COST": boolean;
- // Study version >= 810
- "RES generation by plant"?: boolean;
- "MISC. DTG 2"?: boolean;
- "MISC. DTG 3"?: boolean;
- "MISC. DTG 4"?: boolean;
- "WIND OFFSHORE"?: boolean;
- "WIND ONSHORE"?: boolean;
- "SOLAR CONCRT."?: boolean;
- "SOLAR PV"?: boolean;
- "SOLAR ROOFT"?: boolean;
- "RENW. 1"?: boolean;
- "RENW. 2"?: boolean;
- "RENW. 3"?: boolean;
- "RENW. 4"?: boolean;
-}
diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts
index b16491ff6c..89569ba05c 100644
--- a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts
+++ b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts
@@ -51,7 +51,7 @@ export interface ThematicTrimmingFormFields {
congProbPlus: boolean;
congProbMinus: boolean;
hurdleCost: boolean;
- // For study versions >= 810
+ // Study version >= 810
resGenerationByPlant?: boolean;
miscDtg2?: boolean;
miscDtg3?: boolean;
@@ -65,9 +65,41 @@ export interface ThematicTrimmingFormFields {
renw2?: boolean;
renw3?: boolean;
renw4?: boolean;
- // For study versions >= 830
+ // Study version >= 830
dens?: boolean;
profitByPlant?: boolean;
+ // Study version >= 860
+ stsInjByPlant?: boolean;
+ stsWithdrawalByPlant?: boolean;
+ stsLvlByPlant?: boolean;
+ stsCashflowByCluster?: boolean;
+ pspOpenInjection?: boolean;
+ pspOpenWithdrawal?: boolean;
+ pspOpenLevel?: boolean;
+ pspClosedInjection?: boolean;
+ pspClosedWithdrawal?: boolean;
+ pspClosedLevel?: boolean;
+ pondageInjection?: boolean;
+ pondageWithdrawal?: boolean;
+ pondageLevel?: boolean;
+ batteryInjection?: boolean;
+ batteryWithdrawal?: boolean;
+ batteryLevel?: boolean;
+ other1Injection?: boolean;
+ other1Withdrawal?: boolean;
+ other1Level?: boolean;
+ other2Injection?: boolean;
+ other2Withdrawal?: boolean;
+ other2Level?: boolean;
+ other3Injection?: boolean;
+ other3Withdrawal?: boolean;
+ other3Level?: boolean;
+ other4Injection?: boolean;
+ other4Withdrawal?: boolean;
+ other4Level?: boolean;
+ other5Injection?: boolean;
+ other5Withdrawal?: boolean;
+ other5Level?: boolean;
}
const keysMap: Record = {
@@ -136,6 +168,38 @@ const keysMap: Record = {
// Study version >= 830
dens: "DENS",
profitByPlant: "Profit by plant",
+ // Study version >= 860
+ stsInjByPlant: "STS inj by plant",
+ stsWithdrawalByPlant: "STS withdrawal by plant",
+ stsLvlByPlant: "STS lvl by plant",
+ stsCashflowByCluster: "STS Cashflow By Cluster",
+ pspOpenInjection: "PSP_open_injection",
+ pspOpenWithdrawal: "PSP_open_withdrawal",
+ pspOpenLevel: "PSP_open_level",
+ pspClosedInjection: "PSP_closed_injection",
+ pspClosedWithdrawal: "PSP_closed_withdrawal",
+ pspClosedLevel: "PSP_closed_level",
+ pondageInjection: "Pondage_injection",
+ pondageWithdrawal: "Pondage_withdrawal",
+ pondageLevel: "Pondage_level",
+ batteryInjection: "Battery_injection",
+ batteryWithdrawal: "Battery_withdrawal",
+ batteryLevel: "Battery_level",
+ other1Injection: "Other1_injection",
+ other1Withdrawal: "Other1_withdrawal",
+ other1Level: "Other1_level",
+ other2Injection: "Other2_injection",
+ other2Withdrawal: "Other2_withdrawal",
+ other2Level: "Other2_level",
+ other3Injection: "Other3_injection",
+ other3Withdrawal: "Other3_withdrawal",
+ other3Level: "Other3_level",
+ other4Injection: "Other4_injection",
+ other4Withdrawal: "Other4_withdrawal",
+ other4Level: "Other4_level",
+ other5Injection: "Other5_injection",
+ other5Withdrawal: "Other5_withdrawal",
+ other5Level: "Other5_level",
};
// Allow to support all study versions by using directly the server config
From 7528adb49b570ee082218d727e0b28446537eb77 Mon Sep 17 00:00:00 2001
From: Laurent LAPORTE
Date: Thu, 11 Apr 2024 07:53:24 +0200
Subject: [PATCH 11/16] feat(configuration): turn Thematic Trimming variable
names in upper case
---
docs/CHANGELOG.md | 1 +
.../dialogs/ThematicTrimmingDialog/utils.ts | 72 +++++++++----------
2 files changed, 37 insertions(+), 36 deletions(-)
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 63cce3100b..cdecb8304c 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -7,6 +7,7 @@ v2.16.8 (2024-04-19)
### Features
* **clusters:** add new endpoint for clusters duplication [`#1972`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1972)
+* **configuration:** turn Thematic Trimming variable names in upper case
### Bug Fixes
diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts
index 89569ba05c..c75d2b1b88 100644
--- a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts
+++ b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts
@@ -107,7 +107,7 @@ const keysMap: Record = {
opCost: "OP. COST",
mrgPrice: "MRG. PRICE",
co2Emis: "CO2 EMIS.",
- dtgByPlant: "DTG by plant",
+ dtgByPlant: "DTG BY PLANT",
balance: "BALANCE",
rowBal: "ROW BAL.",
psp: "PSP",
@@ -138,9 +138,9 @@ const keysMap: Record = {
dtgMrg: "DTG MRG",
maxMrg: "MAX MRG",
npCost: "NP COST",
- npCostByPlant: "NP Cost by plant",
+ npCostByPlant: "NP COST BY PLANT",
nodu: "NODU",
- noduByPlant: "NODU by plant",
+ noduByPlant: "NODU BY PLANT",
flowLin: "FLOW LIN.",
ucapLin: "UCAP LIN.",
loopFlow: "LOOP FLOW",
@@ -152,7 +152,7 @@ const keysMap: Record = {
congProbMinus: "CONG. PROB -",
hurdleCost: "HURDLE COST",
// Study version >= 810
- resGenerationByPlant: "RES generation by plant",
+ resGenerationByPlant: "RES GENERATION BY PLANT",
miscDtg2: "MISC. DTG 2",
miscDtg3: "MISC. DTG 3",
miscDtg4: "MISC. DTG 4",
@@ -167,39 +167,39 @@ const keysMap: Record = {
renw4: "RENW. 4",
// Study version >= 830
dens: "DENS",
- profitByPlant: "Profit by plant",
+ profitByPlant: "PROFIT BY PLANT",
// Study version >= 860
- stsInjByPlant: "STS inj by plant",
- stsWithdrawalByPlant: "STS withdrawal by plant",
- stsLvlByPlant: "STS lvl by plant",
- stsCashflowByCluster: "STS Cashflow By Cluster",
- pspOpenInjection: "PSP_open_injection",
- pspOpenWithdrawal: "PSP_open_withdrawal",
- pspOpenLevel: "PSP_open_level",
- pspClosedInjection: "PSP_closed_injection",
- pspClosedWithdrawal: "PSP_closed_withdrawal",
- pspClosedLevel: "PSP_closed_level",
- pondageInjection: "Pondage_injection",
- pondageWithdrawal: "Pondage_withdrawal",
- pondageLevel: "Pondage_level",
- batteryInjection: "Battery_injection",
- batteryWithdrawal: "Battery_withdrawal",
- batteryLevel: "Battery_level",
- other1Injection: "Other1_injection",
- other1Withdrawal: "Other1_withdrawal",
- other1Level: "Other1_level",
- other2Injection: "Other2_injection",
- other2Withdrawal: "Other2_withdrawal",
- other2Level: "Other2_level",
- other3Injection: "Other3_injection",
- other3Withdrawal: "Other3_withdrawal",
- other3Level: "Other3_level",
- other4Injection: "Other4_injection",
- other4Withdrawal: "Other4_withdrawal",
- other4Level: "Other4_level",
- other5Injection: "Other5_injection",
- other5Withdrawal: "Other5_withdrawal",
- other5Level: "Other5_level",
+ stsInjByPlant: "STS INJ BY PLANT",
+ stsWithdrawalByPlant: "STS WITHDRAWAL BY PLANT",
+ stsLvlByPlant: "STS LVL BY PLANT",
+ stsCashflowByCluster: "STS CASHFLOW BY CLUSTER",
+ pspOpenInjection: "PSP_OPEN_INJECTION",
+ pspOpenWithdrawal: "PSP_OPEN_WITHDRAWAL",
+ pspOpenLevel: "PSP_OPEN_LEVEL",
+ pspClosedInjection: "PSP_CLOSED_INJECTION",
+ pspClosedWithdrawal: "PSP_CLOSED_WITHDRAWAL",
+ pspClosedLevel: "PSP_CLOSED_LEVEL",
+ pondageInjection: "PONDAGE_INJECTION",
+ pondageWithdrawal: "PONDAGE_WITHDRAWAL",
+ pondageLevel: "PONDAGE_LEVEL",
+ batteryInjection: "BATTERY_INJECTION",
+ batteryWithdrawal: "BATTERY_WITHDRAWAL",
+ batteryLevel: "BATTERY_LEVEL",
+ other1Injection: "OTHER1_INJECTION",
+ other1Withdrawal: "OTHER1_WITHDRAWAL",
+ other1Level: "OTHER1_LEVEL",
+ other2Injection: "OTHER2_INJECTION",
+ other2Withdrawal: "OTHER2_WITHDRAWAL",
+ other2Level: "OTHER2_LEVEL",
+ other3Injection: "OTHER3_INJECTION",
+ other3Withdrawal: "OTHER3_WITHDRAWAL",
+ other3Level: "OTHER3_LEVEL",
+ other4Injection: "OTHER4_INJECTION",
+ other4Withdrawal: "OTHER4_WITHDRAWAL",
+ other4Level: "OTHER4_LEVEL",
+ other5Injection: "OTHER5_INJECTION",
+ other5Withdrawal: "OTHER5_WITHDRAWAL",
+ other5Level: "OTHER5_LEVEL",
};
// Allow to support all study versions by using directly the server config
From 02f773ccaf6de12fa7c1b09fb6a6c6ac91ba6a40 Mon Sep 17 00:00:00 2001
From: Laurent LAPORTE
Date: Thu, 11 Apr 2024 16:30:34 +0200
Subject: [PATCH 12/16] fix(configuration): version availability for "STS
Cashflow By Cluster" variable is v8.8
---
antarest/study/business/thematic_trimming_field_infos.py | 2 +-
docs/CHANGELOG.md | 1 +
tests/integration/test_integration.py | 2 --
.../General/dialogs/ThematicTrimmingDialog/utils.ts | 6 ++++--
4 files changed, 6 insertions(+), 5 deletions(-)
diff --git a/antarest/study/business/thematic_trimming_field_infos.py b/antarest/study/business/thematic_trimming_field_infos.py
index 30d95a9393..764c2c9590 100644
--- a/antarest/study/business/thematic_trimming_field_infos.py
+++ b/antarest/study/business/thematic_trimming_field_infos.py
@@ -191,7 +191,7 @@ class ThematicTrimmingFormFields(FormFieldsBaseModel, metaclass=AllOptionalMetac
"sts_inj_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS inj by plant", "default_value": True, "start_version": 860},
"sts_withdrawal_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS withdrawal by plant", "default_value": True, "start_version": 860},
"sts_lvl_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS lvl by plant", "default_value": True, "start_version": 860},
- "sts_cashflow_by_cluster": {"topic": _SHORT_TERM_STORAGES, "path": "STS Cashflow By Cluster", "default_value": True, "start_version": 860},
+ "sts_cashflow_by_cluster": {"topic": _SHORT_TERM_STORAGES, "path": "STS Cashflow By Cluster", "default_value": True, "start_version": 880},
# topic: "Short-Term Storages - Group"
"psp_open_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_open_injection", "default_value": True, "start_version": 860},
"psp_open_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_open_withdrawal", "default_value": True, "start_version": 860},
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index cdecb8304c..282867d8f7 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -19,6 +19,7 @@ v2.16.8 (2024-04-19)
* **comments:** use a command to update comments on a variant [`#1959`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1959)
* **outputs (ui):** correct weekly data formatting to support 53-week years [`#1975`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1975)
* **configuration:** add missing variables in Thematic Trimming for studies in version v8.6 or above [`#1992`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1992)
+* **configuration:** version availability for "STS Cashflow By Cluster" variable is v8.8
v2.16.7 (2024-03-05)
diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py
index d9a03481ae..c07f13c59c 100644
--- a/tests/integration/test_integration.py
+++ b/tests/integration/test_integration.py
@@ -967,7 +967,6 @@ def test_area_management(client: TestClient, admin_access_token: str, study_id:
"solarPv": True,
"solarRooft": True,
"spilEnrg": True,
- "stsCashflowByCluster": True,
"stsInjByPlant": True,
"stsLvlByPlant": True,
"stsWithdrawalByPlant": True,
@@ -1135,7 +1134,6 @@ def test_area_management(client: TestClient, admin_access_token: str, study_id:
"solarPv": True,
"solarRooft": True,
"spilEnrg": True,
- "stsCashflowByCluster": True,
"stsInjByPlant": True,
"stsLvlByPlant": True,
"stsWithdrawalByPlant": True,
diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts
index c75d2b1b88..d05f76f289 100644
--- a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts
+++ b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts
@@ -72,7 +72,6 @@ export interface ThematicTrimmingFormFields {
stsInjByPlant?: boolean;
stsWithdrawalByPlant?: boolean;
stsLvlByPlant?: boolean;
- stsCashflowByCluster?: boolean;
pspOpenInjection?: boolean;
pspOpenWithdrawal?: boolean;
pspOpenLevel?: boolean;
@@ -100,6 +99,8 @@ export interface ThematicTrimmingFormFields {
other5Injection?: boolean;
other5Withdrawal?: boolean;
other5Level?: boolean;
+ // Study version >= 880
+ stsCashflowByCluster?: boolean;
}
const keysMap: Record = {
@@ -172,7 +173,6 @@ const keysMap: Record = {
stsInjByPlant: "STS INJ BY PLANT",
stsWithdrawalByPlant: "STS WITHDRAWAL BY PLANT",
stsLvlByPlant: "STS LVL BY PLANT",
- stsCashflowByCluster: "STS CASHFLOW BY CLUSTER",
pspOpenInjection: "PSP_OPEN_INJECTION",
pspOpenWithdrawal: "PSP_OPEN_WITHDRAWAL",
pspOpenLevel: "PSP_OPEN_LEVEL",
@@ -200,6 +200,8 @@ const keysMap: Record = {
other5Injection: "OTHER5_INJECTION",
other5Withdrawal: "OTHER5_WITHDRAWAL",
other5Level: "OTHER5_LEVEL",
+ // Study version >= 880
+ stsCashflowByCluster: "STS CASHFLOW BY CLUSTER",
};
// Allow to support all study versions by using directly the server config
From e812ae498921c49e6890150982a52e18e9ce1333 Mon Sep 17 00:00:00 2001
From: Laurent LAPORTE <43534797+laurent-laporte-pro@users.noreply.github.com>
Date: Fri, 12 Apr 2024 08:49:24 +0200
Subject: [PATCH 13/16] fix(launcher): upgrade the project dependencies to use
Antares-Launcher v1.3.2 (#2008)
---
docs/CHANGELOG.md | 3 +++
requirements.txt | 2 +-
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 282867d8f7..99a0b9265e 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -20,6 +20,9 @@ v2.16.8 (2024-04-19)
* **outputs (ui):** correct weekly data formatting to support 53-week years [`#1975`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1975)
* **configuration:** add missing variables in Thematic Trimming for studies in version v8.6 or above [`#1992`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1992)
* **configuration:** version availability for "STS Cashflow By Cluster" variable is v8.8
+* **launcher:** upgrade the project dependencies to use Antares-Launcher v1.3.2
+ - **ssh:** add retry loop around SSH Exceptions [`#68`](https://github.com/AntaresSimulatorTeam/antares-launcher/pull/68)
+ - **retriever:** avoid infinite loop when `sbatch` command fails [`#69`](https://github.com/AntaresSimulatorTeam/antares-launcher/pull/69)
v2.16.7 (2024-03-05)
diff --git a/requirements.txt b/requirements.txt
index 4e12840d32..d022e77ea3 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,4 @@
-Antares-Launcher~=1.3.1
+Antares-Launcher~=1.3.2
alembic~=1.7.5
asgi-ratelimit[redis]==0.7.0
From 1833a0b0a6207628efcc81a694cc84e1f5773441 Mon Sep 17 00:00:00 2001
From: Laurent LAPORTE <43534797+laurent-laporte-pro@users.noreply.github.com>
Date: Tue, 16 Apr 2024 13:18:26 +0200
Subject: [PATCH 14/16] feat(config-ui): replace underscore with space in
Thematic Trimming variable names (#2010)
---
docs/CHANGELOG.md | 1 +
.../dialogs/ThematicTrimmingDialog/utils.ts | 54 +++++++++----------
2 files changed, 28 insertions(+), 27 deletions(-)
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 99a0b9265e..e48e51eaeb 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -8,6 +8,7 @@ v2.16.8 (2024-04-19)
* **clusters:** add new endpoint for clusters duplication [`#1972`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1972)
* **configuration:** turn Thematic Trimming variable names in upper case
+* **configuration (ui):** replace underscore with space in Thematic Trimming variable names [`#2010`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2010)
### Bug Fixes
diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts
index d05f76f289..d5059668d3 100644
--- a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts
+++ b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ThematicTrimmingDialog/utils.ts
@@ -173,33 +173,33 @@ const keysMap: Record = {
stsInjByPlant: "STS INJ BY PLANT",
stsWithdrawalByPlant: "STS WITHDRAWAL BY PLANT",
stsLvlByPlant: "STS LVL BY PLANT",
- pspOpenInjection: "PSP_OPEN_INJECTION",
- pspOpenWithdrawal: "PSP_OPEN_WITHDRAWAL",
- pspOpenLevel: "PSP_OPEN_LEVEL",
- pspClosedInjection: "PSP_CLOSED_INJECTION",
- pspClosedWithdrawal: "PSP_CLOSED_WITHDRAWAL",
- pspClosedLevel: "PSP_CLOSED_LEVEL",
- pondageInjection: "PONDAGE_INJECTION",
- pondageWithdrawal: "PONDAGE_WITHDRAWAL",
- pondageLevel: "PONDAGE_LEVEL",
- batteryInjection: "BATTERY_INJECTION",
- batteryWithdrawal: "BATTERY_WITHDRAWAL",
- batteryLevel: "BATTERY_LEVEL",
- other1Injection: "OTHER1_INJECTION",
- other1Withdrawal: "OTHER1_WITHDRAWAL",
- other1Level: "OTHER1_LEVEL",
- other2Injection: "OTHER2_INJECTION",
- other2Withdrawal: "OTHER2_WITHDRAWAL",
- other2Level: "OTHER2_LEVEL",
- other3Injection: "OTHER3_INJECTION",
- other3Withdrawal: "OTHER3_WITHDRAWAL",
- other3Level: "OTHER3_LEVEL",
- other4Injection: "OTHER4_INJECTION",
- other4Withdrawal: "OTHER4_WITHDRAWAL",
- other4Level: "OTHER4_LEVEL",
- other5Injection: "OTHER5_INJECTION",
- other5Withdrawal: "OTHER5_WITHDRAWAL",
- other5Level: "OTHER5_LEVEL",
+ pspOpenInjection: "PSP OPEN INJECTION",
+ pspOpenWithdrawal: "PSP OPEN WITHDRAWAL",
+ pspOpenLevel: "PSP OPEN LEVEL",
+ pspClosedInjection: "PSP CLOSED INJECTION",
+ pspClosedWithdrawal: "PSP CLOSED WITHDRAWAL",
+ pspClosedLevel: "PSP CLOSED LEVEL",
+ pondageInjection: "PONDAGE INJECTION",
+ pondageWithdrawal: "PONDAGE WITHDRAWAL",
+ pondageLevel: "PONDAGE LEVEL",
+ batteryInjection: "BATTERY INJECTION",
+ batteryWithdrawal: "BATTERY WITHDRAWAL",
+ batteryLevel: "BATTERY LEVEL",
+ other1Injection: "OTHER1 INJECTION",
+ other1Withdrawal: "OTHER1 WITHDRAWAL",
+ other1Level: "OTHER1 LEVEL",
+ other2Injection: "OTHER2 INJECTION",
+ other2Withdrawal: "OTHER2 WITHDRAWAL",
+ other2Level: "OTHER2 LEVEL",
+ other3Injection: "OTHER3 INJECTION",
+ other3Withdrawal: "OTHER3 WITHDRAWAL",
+ other3Level: "OTHER3 LEVEL",
+ other4Injection: "OTHER4 INJECTION",
+ other4Withdrawal: "OTHER4 WITHDRAWAL",
+ other4Level: "OTHER4 LEVEL",
+ other5Injection: "OTHER5 INJECTION",
+ other5Withdrawal: "OTHER5 WITHDRAWAL",
+ other5Level: "OTHER5 LEVEL",
// Study version >= 880
stsCashflowByCluster: "STS CASHFLOW BY CLUSTER",
};
From 0c4761055ed53f6566111cdd678ce050549af555 Mon Sep 17 00:00:00 2001
From: Samir Kamal <1954121+skamril@users.noreply.github.com>
Date: Fri, 19 Apr 2024 17:44:30 +0200
Subject: [PATCH 15/16] feat(clusters-ui): implement new duplication endpoint
and optimistic update (#1984)
---
docs/CHANGELOG.md | 3 +
webapp/public/locales/en/main.json | 10 +-
webapp/public/locales/fr/main.json | 10 +-
.../Modelization/Areas/Renewables/Fields.tsx | 2 +-
.../Modelization/Areas/Renewables/index.tsx | 227 ++++-----
.../Modelization/Areas/Renewables/utils.ts | 70 +--
.../Modelization/Areas/Storages/Fields.tsx | 2 +-
.../Modelization/Areas/Storages/index.tsx | 207 +++-----
.../Modelization/Areas/Storages/utils.ts | 73 ++-
.../Modelization/Areas/Thermal/Fields.tsx | 2 +-
.../Modelization/Areas/Thermal/index.tsx | 241 ++++-----
.../Modelization/Areas/Thermal/utils.ts | 70 +--
.../Areas/common/clustersUtils.ts | 83 +++
.../Modelization/Areas/common/utils.ts | 118 -----
.../App/Singlestudy/explore/TabWrapper.tsx | 1 -
.../common/GroupedDataTable/CreateDialog.tsx | 46 +-
.../GroupedDataTable/DuplicateDialog.tsx | 4 +-
.../cellRenderers/BooleanCell.tsx | 22 +
.../common/GroupedDataTable/index.tsx | 482 ++++++++++++------
.../common/GroupedDataTable/types.ts | 4 +
.../common/GroupedDataTable/utils.ts | 41 +-
.../src/hooks/useOperationInProgressCount.ts | 51 ++
webapp/src/hooks/useUpdateEffectOnce.ts | 23 +
webapp/src/i18n.ts | 61 +--
webapp/src/index.tsx | 3 -
webapp/src/utils/fnUtils.ts | 19 +
webapp/src/utils/i18nUtils.ts | 22 +
webapp/src/utils/tsUtils.ts | 13 +
webapp/src/utils/validationUtils.ts | 182 +++++++
29 files changed, 1231 insertions(+), 861 deletions(-)
create mode 100644 webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/clustersUtils.ts
delete mode 100644 webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/utils.ts
create mode 100644 webapp/src/components/common/GroupedDataTable/cellRenderers/BooleanCell.tsx
create mode 100644 webapp/src/components/common/GroupedDataTable/types.ts
create mode 100644 webapp/src/hooks/useOperationInProgressCount.ts
create mode 100644 webapp/src/hooks/useUpdateEffectOnce.ts
create mode 100644 webapp/src/utils/i18nUtils.ts
create mode 100644 webapp/src/utils/validationUtils.ts
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index e48e51eaeb..aa70beb064 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -7,11 +7,14 @@ v2.16.8 (2024-04-19)
### Features
* **clusters:** add new endpoint for clusters duplication [`#1972`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1972)
+* **clusters (ui):** implement new duplication endpoint and optimistic update [`#1984`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1984)
* **configuration:** turn Thematic Trimming variable names in upper case
* **configuration (ui):** replace underscore with space in Thematic Trimming variable names [`#2010`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2010)
### Bug Fixes
+* **clusters (ui):** totals are updated after a duplication and a deletion [`#1984`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1984)
+* **clusters (ui):** issue with selecting and deleting rows [`#1984`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1984)
* **st-storages (ui):** correction of incorrect wording between "withdrawal" and "injection" [`#1977`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1977)
* **st-storages (ui):** change matrix titles [`#1994`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1994)
* **st-storages:** use command when updating matrices [`#1971`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1971)
diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json
index b64b0d86bb..ac8bbf70a4 100644
--- a/webapp/public/locales/en/main.json
+++ b/webapp/public/locales/en/main.json
@@ -66,6 +66,7 @@
"global.assign": "Assign",
"global.undo": "Undo",
"global.redo": "Redo",
+ "global.total": "Total",
"global.time.hourly": "Hourly",
"global.time.daily": "Daily",
"global.time.weekly": "Weekly",
@@ -76,6 +77,8 @@
"global.error.failedtoretrievejobs": "Failed to retrieve job information",
"global.error.failedtoretrievelogs": "Failed to retrieve job logs",
"global.error.failedtoretrievedownloads": "Failed to retrieve downloads list",
+ "global.error.create": "Creation failed",
+ "global.error.delete": "Deletion failed",
"global.area.add": "Add an area",
"login.error": "Failed to authenticate",
"tasks.title": "Tasks",
@@ -89,6 +92,7 @@
"data.title": "Data",
"dialog.title.confirmation": "Confirmation",
"dialog.message.logout": "Are you sure you want to logout?",
+ "dialog.message.confirmDelete": "Do you confirm the deletion?",
"button.collapse": "Collapse",
"button.expand": "Expand",
"button.yes": "Yes",
@@ -112,7 +116,7 @@
"form.submit.inProgress": "The form is being submitted. Are you sure you want to leave the page?",
"form.asyncDefaultValues.error": "Failed to get values",
"form.field.required": "Field required",
- "form.field.duplicate": "Value already exists: {{0}}",
+ "form.field.duplicate": "Value already exists",
"form.field.minLength": "{{0}} character(s) minimum",
"form.field.minValue": "The minimum value is {{0}}",
"form.field.maxValue": "The maximum value is {{0}}",
@@ -475,8 +479,8 @@
"study.modelization.clusters.matrix.timeSeries": "Time-Series",
"study.modelization.clusters.backClusterList": "Back to cluster list",
"study.modelization.clusters.tsInterpretation": "TS interpretation",
- "study.modelization.clusters.group": "Group",
- "studies.modelization.clusters.question.delete": "Are you sure you want to delete this cluster?",
+ "studies.modelization.clusters.question.delete_one": "Are you sure you want to delete this cluster?",
+ "studies.modelization.clusters.question.delete_other": "Are you sure you want to delete these {{count}} clusters?",
"study.modelization.bindingConst.comments": "Comments",
"study.modelization.bindingConst.type": "Type",
"study.modelization.bindingConst.constraints": "Constraints",
diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json
index cf8577422e..8764a551b0 100644
--- a/webapp/public/locales/fr/main.json
+++ b/webapp/public/locales/fr/main.json
@@ -66,6 +66,7 @@
"global.assign": "Assigner",
"global.undo": "Annuler",
"global.redo": "Rétablir",
+ "global.total": "Total",
"global.time.hourly": "Horaire",
"global.time.daily": "Journalier",
"global.time.weekly": "Hebdomadaire",
@@ -76,6 +77,8 @@
"global.error.failedtoretrievejobs": "Échec de la récupération des tâches",
"global.error.failedtoretrievelogs": "Échec de la récupération des logs",
"global.error.failedtoretrievedownloads": "Échec de la récupération des exports",
+ "global.error.create": "La création a échoué",
+ "global.error.delete": "La suppression a échoué",
"global.area.add": "Ajouter une zone",
"login.error": "Échec de l'authentification",
"tasks.title": "Tâches",
@@ -89,6 +92,7 @@
"data.title": "Données",
"dialog.title.confirmation": "Confirmation",
"dialog.message.logout": "Êtes vous sûr de vouloir vous déconnecter ?",
+ "dialog.message.confirmDelete": "Confirmez-vous la suppression ?",
"button.collapse": "Réduire",
"button.expand": "Étendre",
"button.yes": "Oui",
@@ -112,7 +116,7 @@
"form.submit.inProgress": "Le formulaire est en cours de soumission. Etes-vous sûr de vouloir quitter la page ?",
"form.asyncDefaultValues.error": "Impossible d'obtenir les valeurs",
"form.field.required": "Champ requis",
- "form.field.duplicate": "Cette valeur existe déjà: {{0}}",
+ "form.field.duplicate": "Cette valeur existe déjà",
"form.field.minLength": "{{0}} caractère(s) minimum",
"form.field.minValue": "La valeur minimum est {{0}}",
"form.field.maxValue": "La valeur maximum est {{0}}",
@@ -475,8 +479,8 @@
"study.modelization.clusters.matrix.timeSeries": "Séries temporelles",
"study.modelization.clusters.backClusterList": "Retour à la liste des clusters",
"study.modelization.clusters.tsInterpretation": "TS interpretation",
- "study.modelization.clusters.group": "Groupes",
- "studies.modelization.clusters.question.delete": "Êtes-vous sûr de vouloir supprimer ce cluster ?",
+ "studies.modelization.clusters.question.delete_one": "Êtes-vous sûr de vouloir supprimer ce cluster ?",
+ "studies.modelization.clusters.question.delete_other": "Êtes-vous sûr de vouloir supprimer ces {{count}} clusters ?",
"study.modelization.bindingConst.comments": "Commentaires",
"study.modelization.bindingConst.type": "Type",
"study.modelization.bindingConst.constraints": "Contraintes",
diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/Fields.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/Fields.tsx
index 16e45f7c53..7b7ea9774c 100644
--- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/Fields.tsx
+++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/Fields.tsx
@@ -29,7 +29,7 @@ function Fields() {
disabled
/>
();
function Renewables() {
const { study } = useOutletContext<{ study: StudyMetadata }>();
- const [t] = useTranslation();
- const areaId = useAppSelector(getCurrentAreaId);
+ const { t } = useTranslation();
const navigate = useNavigate();
const location = useLocation();
+ const areaId = useAppSelector(getCurrentAreaId);
- const {
- clusters,
- clustersWithCapacity,
- totalUnitCount,
- totalInstalledCapacity,
- totalEnabledCapacity,
- } = useClusterDataWithCapacity(
- () => getRenewableClusters(study.id, areaId),
- t("studies.error.retrieveData"),
- [study.id, areaId],
- );
-
- const columns = useMemo>>(
- () => [
- {
- accessorKey: "name",
- header: "Name",
- muiTableHeadCellProps: {
- align: "left",
- },
- muiTableBodyCellProps: {
- align: "left",
- },
- size: 100,
- Cell: ({ renderedCellValue, row }) => {
- const clusterId = row.original.id;
- return (
- navigate(`${location.pathname}/${clusterId}`)}
- >
- {renderedCellValue}
-
- );
- },
+ const { data: clustersWithCapacity = [], isLoading } =
+ usePromiseWithSnackbarError(
+ async () => {
+ const clusters = await getRenewableClusters(study.id, areaId);
+ return clusters?.map(addClusterCapacity);
},
{
- accessorKey: "group",
- header: "Group",
- size: 50,
- filterVariant: "select",
- filterSelectOptions: [...RENEWABLE_GROUPS],
- muiTableHeadCellProps: {
- align: "left",
- },
- muiTableBodyCellProps: {
- align: "left",
- },
- Footer: () => (
- Total:
- ),
+ resetDataOnReload: true,
+ errorMessage: t("studies.error.retrieveData"),
+ deps: [study.id, areaId],
},
- {
- accessorKey: "enabled",
+ );
+
+ const [totals, setTotals] = useState(
+ getClustersWithCapacityTotals(clustersWithCapacity),
+ );
+
+ const columns = useMemo(() => {
+ const { totalUnitCount, totalEnabledCapacity, totalInstalledCapacity } =
+ totals;
+
+ return [
+ columnHelper.accessor("enabled", {
header: "Enabled",
size: 50,
filterVariant: "checkbox",
- Cell: ({ cell }) => (
- () ? t("button.yes") : t("button.no")}
- color={cell.getValue() ? "success" : "error"}
- size="small"
- sx={{ minWidth: 40 }}
- />
- ),
- },
- {
- accessorKey: "tsInterpretation",
+ Cell: BooleanCell,
+ }),
+ columnHelper.accessor("tsInterpretation", {
header: "TS Interpretation",
size: 50,
- },
- {
- accessorKey: "unitCount",
+ }),
+ columnHelper.accessor("unitCount", {
header: "Unit Count",
size: 50,
aggregationFn: "sum",
AggregatedCell: ({ cell }) => (
- {cell.getValue()}
+ {cell.getValue()}
),
Footer: () => {totalUnitCount},
- },
- {
- accessorKey: "nominalCapacity",
+ }),
+ columnHelper.accessor("nominalCapacity", {
header: "Nominal Capacity (MW)",
- size: 200,
- Cell: ({ cell }) => Math.floor(cell.getValue()),
- },
- {
- accessorKey: "installedCapacity",
+ size: 220,
+ Cell: ({ cell }) => Math.floor(cell.getValue()),
+ }),
+ columnHelper.accessor("installedCapacity", {
header: "Enabled / Installed (MW)",
- size: 200,
+ size: 220,
aggregationFn: capacityAggregationFn(),
AggregatedCell: ({ cell }) => (
- {cell.getValue() ?? ""}
+ {cell.getValue() ?? ""}
),
Cell: ({ row }) => (
<>
- {Math.floor(row.original.enabledCapacity ?? 0)} /{" "}
- {Math.floor(row.original.installedCapacity ?? 0)}
+ {Math.floor(row.original.enabledCapacity)} /{" "}
+ {Math.floor(row.original.installedCapacity)}
>
),
Footer: () => (
@@ -146,53 +102,68 @@ function Renewables() {
{totalEnabledCapacity} / {totalInstalledCapacity}
),
- },
- ],
- [
- location.pathname,
- navigate,
- t,
- totalEnabledCapacity,
- totalInstalledCapacity,
- totalUnitCount,
- ],
- );
+ }),
+ ];
+ }, [totals]);
////////////////////////////////////////////////////////////////
// Event handlers
////////////////////////////////////////////////////////////////
- const handleCreateRow = ({
- id,
- installedCapacity,
- enabledCapacity,
- ...cluster
- }: RenewableClusterWithCapacity) => {
- return createRenewableCluster(study.id, areaId, cluster);
+ const handleCreate = async (values: TRow) => {
+ const cluster = await createRenewableCluster(study.id, areaId, values);
+ return addClusterCapacity(cluster);
};
- const handleDeleteSelection = (ids: string[]) => {
+ const handleDuplicate = async (
+ row: RenewableClusterWithCapacity,
+ newName: string,
+ ) => {
+ const cluster = await duplicateRenewableCluster(
+ study.id,
+ areaId,
+ row.id,
+ newName,
+ );
+
+ return { ...row, ...cluster };
+ };
+
+ const handleDelete = (rows: RenewableClusterWithCapacity[]) => {
+ const ids = rows.map((row) => row.id);
return deleteRenewableClusters(study.id, areaId, ids);
};
+ const handleNameClick = (row: RenewableClusterWithCapacity) => {
+ navigate(`${location.pathname}/${row.id}`);
+ };
+
////////////////////////////////////////////////////////////////
// JSX
////////////////////////////////////////////////////////////////
return (
- }
- ifResolved={() => (
-
- )}
- ifRejected={(error) => }
+
+ t("studies.modelization.clusters.question.delete", { count })
+ }
+ fillPendingRow={(row) => ({
+ unitCount: 0,
+ enabledCapacity: 0,
+ installedCapacity: 0,
+ ...row,
+ })}
+ onDataChange={(data) => {
+ setTotals(getClustersWithCapacityTotals(data));
+ }}
/>
);
}
diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/utils.ts
index 074a19c84f..0c0418d8d3 100644
--- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/utils.ts
+++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/utils.ts
@@ -4,6 +4,8 @@ import {
StudyMetadata,
} from "../../../../../../../common/types";
import client from "../../../../../../../services/api/client";
+import type { PartialExceptFor } from "../../../../../../../utils/tsUtils";
+import type { ClusterWithCapacity } from "../common/clustersUtils";
////////////////////////////////////////////////////////////////
// Constants
@@ -30,8 +32,9 @@ export const TS_INTERPRETATION_OPTIONS = [
// Types
////////////////////////////////////////////////////////////////
+export type RenewableGroup = (typeof RENEWABLE_GROUPS)[number];
+
type TimeSeriesInterpretation = (typeof TS_INTERPRETATION_OPTIONS)[number];
-type RenewableGroup = (typeof RENEWABLE_GROUPS)[number];
export interface RenewableFormFields {
name: string;
@@ -52,10 +55,8 @@ export interface RenewableCluster {
nominalCapacity: number;
}
-export interface RenewableClusterWithCapacity extends RenewableCluster {
- installedCapacity: number;
- enabledCapacity: number;
-}
+export type RenewableClusterWithCapacity =
+ ClusterWithCapacity;
////////////////////////////////////////////////////////////////
// Functions
@@ -72,34 +73,29 @@ const getClusterUrl = (
clusterId: Cluster["id"],
): string => `${getClustersUrl(studyId, areaId)}/${clusterId}`;
-async function makeRequest(
- method: "get" | "post" | "patch" | "delete",
- url: string,
- data?: Partial | { data: Array },
-): Promise {
- const res = await client[method](url, data);
- return res.data;
-}
+////////////////////////////////////////////////////////////////
+// API
+////////////////////////////////////////////////////////////////
export async function getRenewableClusters(
studyId: StudyMetadata["id"],
areaId: Area["name"],
-): Promise {
- return makeRequest(
- "get",
+) {
+ const res = await client.get(
getClustersUrl(studyId, areaId),
);
+ return res.data;
}
export async function getRenewableCluster(
studyId: StudyMetadata["id"],
areaId: Area["name"],
clusterId: Cluster["id"],
-): Promise {
- return makeRequest(
- "get",
+) {
+ const res = await client.get(
getClusterUrl(studyId, areaId, clusterId),
);
+ return res.data;
}
export async function updateRenewableCluster(
@@ -107,32 +103,44 @@ export async function updateRenewableCluster(
areaId: Area["name"],
clusterId: Cluster["id"],
data: Partial,
-): Promise {
- return makeRequest(
- "patch",
+) {
+ const res = await client.patch(
getClusterUrl(studyId, areaId, clusterId),
data,
);
+ return res.data;
}
export async function createRenewableCluster(
studyId: StudyMetadata["id"],
areaId: Area["name"],
- data: Partial,
-): Promise {
- return makeRequest(
- "post",
+ data: PartialExceptFor,
+) {
+ const res = await client.post(
getClustersUrl(studyId, areaId),
data,
);
+ return res.data;
+}
+
+export async function duplicateRenewableCluster(
+ studyId: StudyMetadata["id"],
+ areaId: Area["name"],
+ sourceClusterId: RenewableCluster["id"],
+ newName: RenewableCluster["name"],
+) {
+ const res = await client.post(
+ `/v1/studies/${studyId}/areas/${areaId}/renewables/${sourceClusterId}`,
+ null,
+ { params: { newName } },
+ );
+ return res.data;
}
-export function deleteRenewableClusters(
+export async function deleteRenewableClusters(
studyId: StudyMetadata["id"],
areaId: Area["name"],
clusterIds: Array,
-): Promise {
- return makeRequest("delete", getClustersUrl(studyId, areaId), {
- data: clusterIds,
- });
+) {
+ await client.delete(getClustersUrl(studyId, areaId), { data: clusterIds });
}
diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Fields.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Fields.tsx
index 8485fd29e6..9d6935b3fe 100644
--- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Fields.tsx
+++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/Fields.tsx
@@ -25,7 +25,7 @@ function Fields() {
disabled
/>
();
function Storages() {
const { study } = useOutletContext<{ study: StudyMetadata }>();
- const [t] = useTranslation();
+ const { t } = useTranslation();
const navigate = useNavigate();
const location = useLocation();
const areaId = useAppSelector(getCurrentAreaId);
- const storages = usePromiseWithSnackbarError(
+ const { data: storages = [], isLoading } = usePromiseWithSnackbarError(
() => getStorages(study.id, areaId),
{
+ resetDataOnReload: true,
errorMessage: t("studies.error.retrieveData"),
deps: [study.id, areaId],
},
);
- const { totalWithdrawalNominalCapacity, totalInjectionNominalCapacity } =
- useMemo(() => {
- if (!storages.data) {
- return {
- totalWithdrawalNominalCapacity: 0,
- totalInjectionNominalCapacity: 0,
- };
- }
+ const [totals, setTotals] = useState(getStoragesTotals(storages));
- return storages.data.reduce(
- (acc, { withdrawalNominalCapacity, injectionNominalCapacity }) => {
- acc.totalWithdrawalNominalCapacity += withdrawalNominalCapacity;
- acc.totalInjectionNominalCapacity += injectionNominalCapacity;
- return acc;
- },
- {
- totalWithdrawalNominalCapacity: 0,
- totalInjectionNominalCapacity: 0,
- },
- );
- }, [storages]);
+ const columns = useMemo(() => {
+ const { totalInjectionNominalCapacity, totalWithdrawalNominalCapacity } =
+ totals;
- const columns = useMemo>>(
- () => [
- {
- accessorKey: "name",
- header: t("global.name"),
- muiTableHeadCellProps: {
- align: "left",
- },
- muiTableBodyCellProps: {
- align: "left",
- },
- size: 100,
- Cell: ({ renderedCellValue, row }) => {
- const storageId = row.original.id;
- return (
- navigate(`${location.pathname}/${storageId}`)}
- >
- {renderedCellValue}
-
- );
- },
- },
- {
- accessorKey: "group",
- header: t("global.group"),
- size: 50,
- filterVariant: "select",
- filterSelectOptions: [...STORAGE_GROUPS],
- muiTableHeadCellProps: {
- align: "left",
- },
- muiTableBodyCellProps: {
- align: "left",
- },
- Footer: () => (
- Total:
- ),
- },
- {
- accessorKey: "injectionNominalCapacity",
+ return [
+ columnHelper.accessor("injectionNominalCapacity", {
header: t("study.modelization.storages.injectionNominalCapacity"),
Header: ({ column }) => (
),
size: 100,
- Cell: ({ cell }) => Math.floor(cell.getValue()),
+ aggregationFn: "sum",
AggregatedCell: ({ cell }) => (
- {Math.floor(cell.getValue())}
+ {Math.floor(cell.getValue())}
),
+ Cell: ({ cell }) => Math.floor(cell.getValue()),
Footer: () => (
{Math.floor(totalInjectionNominalCapacity)}
),
- },
- {
- accessorKey: "withdrawalNominalCapacity",
+ }),
+ columnHelper.accessor("withdrawalNominalCapacity", {
header: t("study.modelization.storages.withdrawalNominalCapacity"),
Header: ({ column }) => (
(
- {Math.floor(cell.getValue())}
+ {Math.floor(cell.getValue())}
),
- Cell: ({ cell }) => Math.floor(cell.getValue()),
+ Cell: ({ cell }) => Math.floor(cell.getValue()),
Footer: () => (
{Math.floor(totalWithdrawalNominalCapacity)}
),
- },
- {
- accessorKey: "reservoirCapacity",
+ }),
+ columnHelper.accessor("reservoirCapacity", {
header: t("study.modelization.storages.reservoirCapacity"),
Header: ({ column }) => (
),
size: 100,
- Cell: ({ cell }) => `${cell.getValue()}`,
- },
- {
- accessorKey: "efficiency",
+ Cell: ({ cell }) => `${cell.getValue()}`,
+ }),
+ columnHelper.accessor("efficiency", {
header: t("study.modelization.storages.efficiency"),
size: 50,
- Cell: ({ cell }) => `${Math.floor(cell.getValue() * 100)}`,
- },
- {
- accessorKey: "initialLevel",
+ Cell: ({ cell }) => `${Math.floor(cell.getValue() * 100)}`,
+ }),
+ columnHelper.accessor("initialLevel", {
header: t("study.modelization.storages.initialLevel"),
size: 50,
- Cell: ({ cell }) => `${Math.floor(cell.getValue() * 100)}`,
- },
- {
- accessorKey: "initialLevelOptim",
+ Cell: ({ cell }) => `${Math.floor(cell.getValue() * 100)}`,
+ }),
+ columnHelper.accessor("initialLevelOptim", {
header: t("study.modelization.storages.initialLevelOptim"),
- size: 180,
+ size: 200,
filterVariant: "checkbox",
- Cell: ({ cell }) => (
- () ? t("button.yes") : t("button.no")}
- color={cell.getValue() ? "success" : "error"}
- size="small"
- sx={{ minWidth: 40 }}
- />
- ),
- },
- ],
- [
- location.pathname,
- navigate,
- t,
- totalInjectionNominalCapacity,
- totalWithdrawalNominalCapacity,
- ],
- );
+ Cell: BooleanCell,
+ }),
+ ];
+ }, [t, totals]);
////////////////////////////////////////////////////////////////
// Event handlers
////////////////////////////////////////////////////////////////
- const handleCreateRow = ({ id, ...storage }: Storage) => {
- return createStorage(study.id, areaId, storage);
+ const handleCreate = (values: TRow) => {
+ return createStorage(study.id, areaId, values);
+ };
+
+ const handleDuplicate = (row: Storage, newName: string) => {
+ return duplicateStorage(study.id, areaId, row.id, newName);
};
- const handleDeleteSelection = (ids: string[]) => {
+ const handleDelete = (rows: Storage[]) => {
+ const ids = rows.map((row) => row.id);
return deleteStorages(study.id, areaId, ids);
};
+ const handleNameClick = (row: Storage) => {
+ navigate(`${location.pathname}/${row.id}`);
+ };
+
////////////////////////////////////////////////////////////////
// JSX
////////////////////////////////////////////////////////////////
return (
- }
- ifResolved={(data) => (
-
- )}
- ifRejected={(error) => }
+
+ t("studies.modelization.clusters.question.delete", { count })
+ }
+ fillPendingRow={(row) => ({
+ withdrawalNominalCapacity: 0,
+ injectionNominalCapacity: 0,
+ ...row,
+ })}
+ onDataChange={(data) => {
+ setTotals(getStoragesTotals(data));
+ }}
/>
);
}
diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts
index 1226bcac66..48466da646 100644
--- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts
+++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/utils.ts
@@ -1,5 +1,6 @@
import { StudyMetadata, Area } from "../../../../../../../common/types";
import client from "../../../../../../../services/api/client";
+import type { PartialExceptFor } from "../../../../../../../utils/tsUtils";
////////////////////////////////////////////////////////////////
// Constants
@@ -39,6 +40,20 @@ export interface Storage {
// Functions
////////////////////////////////////////////////////////////////
+export function getStoragesTotals(storages: Storage[]) {
+ return storages.reduce(
+ (acc, { withdrawalNominalCapacity, injectionNominalCapacity }) => {
+ acc.totalWithdrawalNominalCapacity += withdrawalNominalCapacity;
+ acc.totalInjectionNominalCapacity += injectionNominalCapacity;
+ return acc;
+ },
+ {
+ totalWithdrawalNominalCapacity: 0,
+ totalInjectionNominalCapacity: 0,
+ },
+ );
+}
+
const getStoragesUrl = (
studyId: StudyMetadata["id"],
areaId: Area["name"],
@@ -50,28 +65,27 @@ const getStorageUrl = (
storageId: Storage["id"],
): string => `${getStoragesUrl(studyId, areaId)}/${storageId}`;
-async function makeRequest(
- method: "get" | "post" | "patch" | "delete",
- url: string,
- data?: Partial | { data: Array },
-): Promise {
- const res = await client[method](url, data);
- return res.data;
-}
+////////////////////////////////////////////////////////////////
+// API
+////////////////////////////////////////////////////////////////
export async function getStorages(
studyId: StudyMetadata["id"],
areaId: Area["name"],
-): Promise {
- return makeRequest("get", getStoragesUrl(studyId, areaId));
+) {
+ const res = await client.get(getStoragesUrl(studyId, areaId));
+ return res.data;
}
export async function getStorage(
studyId: StudyMetadata["id"],
areaId: Area["name"],
storageId: Storage["id"],
-): Promise {
- return makeRequest("get", getStorageUrl(studyId, areaId, storageId));
+) {
+ const res = await client.get(
+ getStorageUrl(studyId, areaId, storageId),
+ );
+ return res.data;
}
export async function updateStorage(
@@ -79,28 +93,41 @@ export async function updateStorage(
areaId: Area["name"],
storageId: Storage["id"],
data: Partial,
-): Promise {
- return makeRequest(
- "patch",
+) {
+ const res = await client.patch(
getStorageUrl(studyId, areaId, storageId),
data,
);
+ return res.data;
}
export async function createStorage(
studyId: StudyMetadata["id"],
areaId: Area["name"],
- data: Partial,
-): Promise {
- return makeRequest("post", getStoragesUrl(studyId, areaId), data);
+ data: PartialExceptFor,
+) {
+ const res = await client.post(getStoragesUrl(studyId, areaId), data);
+ return res.data;
+}
+
+export async function duplicateStorage(
+ studyId: StudyMetadata["id"],
+ areaId: Area["name"],
+ sourceClusterId: Storage["id"],
+ newName: Storage["name"],
+) {
+ const res = await client.post(
+ `/v1/studies/${studyId}/areas/${areaId}/storages/${sourceClusterId}`,
+ null,
+ { params: { newName } },
+ );
+ return res.data;
}
-export function deleteStorages(
+export async function deleteStorages(
studyId: StudyMetadata["id"],
areaId: Area["name"],
storageIds: Array,
-): Promise {
- return makeRequest("delete", getStoragesUrl(studyId, areaId), {
- data: storageIds,
- });
+) {
+ await client.delete(getStoragesUrl(studyId, areaId), { data: storageIds });
}
diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx
index cf5cb2fc66..ec5d6fc632 100644
--- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx
+++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/Fields.tsx
@@ -35,7 +35,7 @@ function Fields() {
disabled
/>
();
function Thermal() {
const { study } = useOutletContext<{ study: StudyMetadata }>();
- const [t] = useTranslation();
+ const { t } = useTranslation();
const navigate = useNavigate();
const location = useLocation();
const areaId = useAppSelector(getCurrentAreaId);
- const {
- clusters,
- clustersWithCapacity,
- totalUnitCount,
- totalInstalledCapacity,
- totalEnabledCapacity,
- } = useClusterDataWithCapacity(
- () => getThermalClusters(study.id, areaId),
- t("studies.error.retrieveData"),
- [study.id, areaId],
- );
-
- const columns = useMemo>>(
- () => [
- {
- accessorKey: "name",
- header: "Name",
- size: 100,
- muiTableHeadCellProps: {
- align: "left",
- },
- muiTableBodyCellProps: {
- align: "left",
- },
- Cell: ({ renderedCellValue, row }) => {
- const clusterId = row.original.id;
- return (
- navigate(`${location.pathname}/${clusterId}`)}
- >
- {renderedCellValue}
-
- );
- },
+ const { data: clustersWithCapacity = [], isLoading } =
+ usePromiseWithSnackbarError(
+ async () => {
+ const clusters = await getThermalClusters(study.id, areaId);
+ return clusters?.map(addClusterCapacity);
},
{
- accessorKey: "group",
- header: "Group",
- size: 50,
- filterVariant: "select",
- filterSelectOptions: [...THERMAL_GROUPS],
- muiTableHeadCellProps: {
- align: "left",
- },
- muiTableBodyCellProps: {
- align: "left",
- },
- Footer: () => (
- Total:
- ),
+ resetDataOnReload: true,
+ errorMessage: t("studies.error.retrieveData"),
+ deps: [study.id, areaId],
},
- {
- accessorKey: "enabled",
+ );
+
+ const [totals, setTotals] = useState(
+ getClustersWithCapacityTotals(clustersWithCapacity),
+ );
+
+ const columns = useMemo(() => {
+ const { totalUnitCount, totalEnabledCapacity, totalInstalledCapacity } =
+ totals;
+
+ return [
+ columnHelper.accessor("enabled", {
header: "Enabled",
size: 50,
filterVariant: "checkbox",
- Cell: ({ cell }) => (
- () ? t("button.yes") : t("button.no")}
- color={cell.getValue() ? "success" : "error"}
- size="small"
- sx={{ minWidth: 40 }}
- />
- ),
- },
- {
- accessorKey: "mustRun",
+ Cell: BooleanCell,
+ }),
+ columnHelper.accessor("mustRun", {
header: "Must Run",
size: 50,
filterVariant: "checkbox",
- Cell: ({ cell }) => (
- () ? t("button.yes") : t("button.no")}
- color={cell.getValue() ? "success" : "error"}
- size="small"
- sx={{ minWidth: 40 }}
- />
- ),
- },
- {
- accessorKey: "unitCount",
+ Cell: BooleanCell,
+ }),
+ columnHelper.accessor("unitCount", {
header: "Unit Count",
size: 50,
aggregationFn: "sum",
AggregatedCell: ({ cell }) => (
- {cell.getValue()}
+ {cell.getValue()}
),
Footer: () => {totalUnitCount},
- },
- {
- accessorKey: "nominalCapacity",
+ }),
+ columnHelper.accessor("nominalCapacity", {
header: "Nominal Capacity (MW)",
- size: 200,
- Cell: ({ cell }) => cell.getValue().toFixed(1),
- },
- {
- accessorKey: "installedCapacity",
+ size: 220,
+ Cell: ({ cell }) => cell.getValue().toFixed(1),
+ }),
+ columnHelper.accessor("installedCapacity", {
header: "Enabled / Installed (MW)",
- size: 200,
+ size: 220,
aggregationFn: capacityAggregationFn(),
AggregatedCell: ({ cell }) => (
- {cell.getValue() ?? ""}
+ {cell.getValue() ?? ""}
),
Cell: ({ row }) => (
<>
- {Math.floor(row.original.enabledCapacity ?? 0)} /{" "}
- {Math.floor(row.original.installedCapacity ?? 0)}
+ {Math.floor(row.original.enabledCapacity)} /{" "}
+ {Math.floor(row.original.installedCapacity)}
>
),
Footer: () => (
@@ -155,59 +104,73 @@ function Thermal() {
{totalEnabledCapacity} / {totalInstalledCapacity}
),
- },
- {
- accessorKey: "marketBidCost",
+ }),
+ columnHelper.accessor("marketBidCost", {
header: "Market Bid (€/MWh)",
size: 50,
- Cell: ({ cell }) => <>{cell.getValue().toFixed(2)}>,
- },
- ],
- [
- location.pathname,
- navigate,
- t,
- totalEnabledCapacity,
- totalInstalledCapacity,
- totalUnitCount,
- ],
- );
+ Cell: ({ cell }) => <>{cell.getValue().toFixed(2)}>,
+ }),
+ ];
+ }, [totals]);
////////////////////////////////////////////////////////////////
// Event handlers
////////////////////////////////////////////////////////////////
- const handleCreateRow = ({
- id,
- installedCapacity,
- enabledCapacity,
- ...cluster
- }: ThermalClusterWithCapacity) => {
- return createThermalCluster(study.id, areaId, cluster);
+ const handleCreate = async (values: TRow) => {
+ const cluster = await createThermalCluster(study.id, areaId, values);
+ return addClusterCapacity(cluster);
};
- const handleDeleteSelection = (ids: string[]) => {
+ const handleDuplicate = async (
+ row: ThermalClusterWithCapacity,
+ newName: string,
+ ) => {
+ const cluster = await duplicateThermalCluster(
+ study.id,
+ areaId,
+ row.id,
+ newName,
+ );
+
+ return { ...row, ...cluster };
+ };
+
+ const handleDelete = (rows: ThermalClusterWithCapacity[]) => {
+ const ids = rows.map((row) => row.id);
return deleteThermalClusters(study.id, areaId, ids);
};
+ const handleNameClick = (row: ThermalClusterWithCapacity) => {
+ navigate(`${location.pathname}/${row.id}`);
+ };
+
////////////////////////////////////////////////////////////////
// JSX
////////////////////////////////////////////////////////////////
return (
- }
- ifResolved={() => (
-
- )}
- ifRejected={(error) => }
+
+ t("studies.modelization.clusters.question.delete", { count })
+ }
+ fillPendingRow={(row) => ({
+ unitCount: 0,
+ enabledCapacity: 0,
+ installedCapacity: 0,
+ ...row,
+ })}
+ onDataChange={(data) => {
+ setTotals(getClustersWithCapacityTotals(data));
+ }}
/>
);
}
diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts
index d113e06c4f..8d5836a4e0 100644
--- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts
+++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/utils.ts
@@ -4,6 +4,8 @@ import {
StudyMetadata,
} from "../../../../../../../common/types";
import client from "../../../../../../../services/api/client";
+import type { PartialExceptFor } from "../../../../../../../utils/tsUtils";
+import type { ClusterWithCapacity } from "../common/clustersUtils";
////////////////////////////////////////////////////////////////
// Constants
@@ -51,7 +53,8 @@ export const TS_LAW_OPTIONS = ["geometric", "uniform"] as const;
// Types
////////////////////////////////////////////////////////////////
-type ThermalGroup = (typeof THERMAL_GROUPS)[number];
+export type ThermalGroup = (typeof THERMAL_GROUPS)[number];
+
type LocalTSGenerationBehavior = (typeof TS_GENERATION_OPTIONS)[number];
type TimeSeriesLawOption = (typeof TS_LAW_OPTIONS)[number];
@@ -83,10 +86,7 @@ export interface ThermalCluster extends ThermalPollutants {
lawPlanned: TimeSeriesLawOption;
}
-export interface ThermalClusterWithCapacity extends ThermalCluster {
- enabledCapacity: number;
- installedCapacity: number;
-}
+export type ThermalClusterWithCapacity = ClusterWithCapacity;
////////////////////////////////////////////////////////////////
// Functions
@@ -103,31 +103,29 @@ const getClusterUrl = (
clusterId: Cluster["id"],
): string => `${getClustersUrl(studyId, areaId)}/${clusterId}`;
-async function makeRequest(
- method: "get" | "post" | "patch" | "delete",
- url: string,
- data?: Partial | { data: Array },
-): Promise {
- const res = await client[method](url, data);
- return res.data;
-}
+////////////////////////////////////////////////////////////////
+// API
+////////////////////////////////////////////////////////////////
export async function getThermalClusters(
studyId: StudyMetadata["id"],
areaId: Area["name"],
-): Promise {
- return makeRequest("get", getClustersUrl(studyId, areaId));
+) {
+ const res = await client.get(
+ getClustersUrl(studyId, areaId),
+ );
+ return res.data;
}
export async function getThermalCluster(
studyId: StudyMetadata["id"],
areaId: Area["name"],
clusterId: Cluster["id"],
-): Promise {
- return makeRequest(
- "get",
+) {
+ const res = await client.get(
getClusterUrl(studyId, areaId, clusterId),
);
+ return res.data;
}
export async function updateThermalCluster(
@@ -135,32 +133,44 @@ export async function updateThermalCluster(
areaId: Area["name"],
clusterId: Cluster["id"],
data: Partial,
-): Promise {
- return makeRequest(
- "patch",
+) {
+ const res = await client.patch(
getClusterUrl(studyId, areaId, clusterId),
data,
);
+ return res.data;
}
export async function createThermalCluster(
studyId: StudyMetadata["id"],
areaId: Area["name"],
- data: Partial,
-): Promise {
- return makeRequest(
- "post",
+ data: PartialExceptFor,
+) {
+ const res = await client.post(
getClustersUrl(studyId, areaId),
data,
);
+ return res.data;
+}
+
+export async function duplicateThermalCluster(
+ studyId: StudyMetadata["id"],
+ areaId: Area["name"],
+ sourceClusterId: ThermalCluster["id"],
+ newName: ThermalCluster["name"],
+) {
+ const res = await client.post(
+ `/v1/studies/${studyId}/areas/${areaId}/thermals/${sourceClusterId}`,
+ null,
+ { params: { newName } },
+ );
+ return res.data;
}
-export function deleteThermalClusters(
+export async function deleteThermalClusters(
studyId: StudyMetadata["id"],
areaId: Area["name"],
clusterIds: Array,
-): Promise {
- return makeRequest("delete", getClustersUrl(studyId, areaId), {
- data: clusterIds,
- });
+) {
+ await client.delete(getClustersUrl(studyId, areaId), { data: clusterIds });
}
diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/clustersUtils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/clustersUtils.ts
new file mode 100644
index 0000000000..81b27a820a
--- /dev/null
+++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/clustersUtils.ts
@@ -0,0 +1,83 @@
+import { MRT_AggregationFn } from "material-react-table";
+import { ThermalClusterWithCapacity } from "../Thermal/utils";
+import { RenewableClusterWithCapacity } from "../Renewables/utils";
+
+/**
+ * Custom aggregation function summing the values of each row,
+ * to display enabled and installed capacity in the same cell.
+ * @param colHeader - the column header
+ * @param rows - the column rows to aggregate
+ * @returns a string with the sum of enabled and installed capacity.
+ * @example "100/200"
+ * @see https://www.material-react-table.com/docs/guides/aggregation-and-grouping#custom-aggregation-functions
+ */
+export const capacityAggregationFn = <
+ T extends ThermalClusterWithCapacity | RenewableClusterWithCapacity,
+>(): MRT_AggregationFn => {
+ return (columnId, leafRows) => {
+ const { enabledCapacitySum, installedCapacitySum } = leafRows.reduce(
+ (acc, row) => {
+ acc.enabledCapacitySum += row.original.enabledCapacity;
+ acc.installedCapacitySum += row.original.installedCapacity;
+
+ return acc;
+ },
+ { enabledCapacitySum: 0, installedCapacitySum: 0 },
+ );
+
+ return `${Math.floor(enabledCapacitySum)} / ${Math.floor(
+ installedCapacitySum,
+ )}`;
+ };
+};
+
+interface BaseCluster {
+ name: string;
+ group: string;
+ unitCount: number;
+ nominalCapacity: number;
+ enabled: boolean;
+}
+
+export type ClusterWithCapacity = T & {
+ installedCapacity: number;
+ enabledCapacity: number;
+};
+
+/**
+ * Adds the installed and enabled capacity fields to a cluster.
+ *
+ * @param cluster - The cluster to add the capacity fields to.
+ * @returns The cluster with the installed and enabled capacity fields added.
+ */
+export function addClusterCapacity(cluster: T) {
+ const { unitCount, nominalCapacity, enabled } = cluster;
+ const installedCapacity = unitCount * nominalCapacity;
+ const enabledCapacity = enabled ? installedCapacity : 0;
+ return { ...cluster, installedCapacity, enabledCapacity };
+}
+
+/**
+ * Gets the totals for unit count, installed capacity, and enabled capacity
+ * for the specified clusters.
+ *
+ * @param clusters - The clusters to get the totals for.
+ * @returns An object containing the totals.
+ */
+export function getClustersWithCapacityTotals(
+ clusters: Array>,
+) {
+ return clusters.reduce(
+ (acc, { unitCount, installedCapacity, enabledCapacity }) => {
+ acc.totalUnitCount += unitCount;
+ acc.totalInstalledCapacity += installedCapacity;
+ acc.totalEnabledCapacity += enabledCapacity;
+ return acc;
+ },
+ {
+ totalUnitCount: 0,
+ totalInstalledCapacity: 0,
+ totalEnabledCapacity: 0,
+ },
+ );
+}
diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/utils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/utils.ts
deleted file mode 100644
index 8528245c84..0000000000
--- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/utils.ts
+++ /dev/null
@@ -1,118 +0,0 @@
-import { DependencyList, useMemo } from "react";
-import * as R from "ramda";
-import { MRT_AggregationFn } from "material-react-table";
-import { StudyMetadata } from "../../../../../../../common/types";
-import { editStudy } from "../../../../../../../services/api/study";
-import { ThermalClusterWithCapacity } from "../Thermal/utils";
-import { RenewableClusterWithCapacity } from "../Renewables/utils";
-import usePromiseWithSnackbarError from "../../../../../../../hooks/usePromiseWithSnackbarError";
-import { UsePromiseResponse } from "../../../../../../../hooks/usePromise";
-
-export const saveField = R.curry(
- (
- studyId: StudyMetadata["id"],
- path: string,
- data: Record,
- ): Promise => {
- return editStudy(data, studyId, path);
- },
-);
-
-/**
- * Custom aggregation function summing the values of each row,
- * to display enabled and installed capacity in the same cell.
- * @param colHeader - the column header
- * @param rows - the column rows to aggregate
- * @returns a string with the sum of enabled and installed capacity.
- * @example "100/200"
- * @see https://www.material-react-table.com/docs/guides/aggregation-and-grouping#custom-aggregation-functions
- */
-export const capacityAggregationFn = <
- T extends ThermalClusterWithCapacity | RenewableClusterWithCapacity,
->(): MRT_AggregationFn => {
- return (colHeader, rows) => {
- const { enabledCapacitySum, installedCapacitySum } = rows.reduce(
- (acc, row) => {
- acc.enabledCapacitySum += row.original.enabledCapacity ?? 0;
- acc.installedCapacitySum += row.original.installedCapacity ?? 0;
- return acc;
- },
- { enabledCapacitySum: 0, installedCapacitySum: 0 },
- );
-
- return `${Math.floor(enabledCapacitySum)} / ${Math.floor(
- installedCapacitySum,
- )}`;
- };
-};
-
-interface BaseCluster {
- name: string;
- group: string;
- unitCount: number;
- nominalCapacity: number;
- enabled: boolean;
-}
-
-type ClusterWithCapacity = T & {
- installedCapacity: number;
- enabledCapacity: number;
-};
-
-interface UseClusterDataWithCapacityReturn {
- clusters: UsePromiseResponse;
- clustersWithCapacity: Array>;
- totalUnitCount: number;
- totalInstalledCapacity: number;
- totalEnabledCapacity: number;
-}
-
-export const useClusterDataWithCapacity = (
- fetchFn: () => Promise,
- errorMessage: string,
- deps: DependencyList,
-): UseClusterDataWithCapacityReturn => {
- const clusters: UsePromiseResponse = usePromiseWithSnackbarError(
- fetchFn,
- {
- errorMessage,
- deps,
- },
- );
-
- const clustersWithCapacity: Array> = useMemo(
- () =>
- clusters.data?.map((cluster) => {
- const { unitCount, nominalCapacity, enabled } = cluster;
- const installedCapacity = unitCount * nominalCapacity;
- const enabledCapacity = enabled ? installedCapacity : 0;
- return { ...cluster, installedCapacity, enabledCapacity };
- }) || [],
- [clusters.data],
- );
-
- const { totalUnitCount, totalInstalledCapacity, totalEnabledCapacity } =
- useMemo(() => {
- return clustersWithCapacity.reduce(
- (acc, { unitCount, nominalCapacity, enabled }) => {
- acc.totalUnitCount += unitCount;
- acc.totalInstalledCapacity += unitCount * nominalCapacity;
- acc.totalEnabledCapacity += enabled ? unitCount * nominalCapacity : 0;
- return acc;
- },
- {
- totalUnitCount: 0,
- totalInstalledCapacity: 0,
- totalEnabledCapacity: 0,
- },
- );
- }, [clustersWithCapacity]);
-
- return {
- clusters,
- clustersWithCapacity,
- totalUnitCount: Math.floor(totalUnitCount),
- totalInstalledCapacity: Math.floor(totalInstalledCapacity),
- totalEnabledCapacity: Math.floor(totalEnabledCapacity),
- };
-};
diff --git a/webapp/src/components/App/Singlestudy/explore/TabWrapper.tsx b/webapp/src/components/App/Singlestudy/explore/TabWrapper.tsx
index ec7e9149c3..256d5cfa3a 100644
--- a/webapp/src/components/App/Singlestudy/explore/TabWrapper.tsx
+++ b/webapp/src/components/App/Singlestudy/explore/TabWrapper.tsx
@@ -84,7 +84,6 @@ function TabWrapper({
display: "flex",
flexDirection: "column",
justifyContent: "flex-start",
- alignItems: "center",
},
sx,
)}
diff --git a/webapp/src/components/common/GroupedDataTable/CreateDialog.tsx b/webapp/src/components/common/GroupedDataTable/CreateDialog.tsx
index d85cd669fd..d38a92814b 100644
--- a/webapp/src/components/common/GroupedDataTable/CreateDialog.tsx
+++ b/webapp/src/components/common/GroupedDataTable/CreateDialog.tsx
@@ -1,47 +1,37 @@
-import { t } from "i18next";
import AddCircleIcon from "@mui/icons-material/AddCircle";
import FormDialog from "../dialogs/FormDialog";
import StringFE from "../fieldEditors/StringFE";
import Fieldset from "../Fieldset";
import { SubmitHandlerPlus } from "../Form/types";
import SelectFE from "../fieldEditors/SelectFE";
-import { nameToId } from "../../../services/utils";
-import { TRow } from "./utils";
+import type { TRow } from "./types";
+import { useTranslation } from "react-i18next";
-interface Props {
+interface Props {
open: boolean;
onClose: VoidFunction;
- onSubmit: (values: TData) => Promise;
- groups: string[] | readonly string[];
- existingNames: Array;
+ onSubmit: (values: TRow) => Promise;
+ groups: string[];
+ existingNames: Array;
}
-const defaultValues = {
- name: "",
- group: "",
-};
-
-function CreateDialog({
+function CreateDialog({
open,
onClose,
onSubmit,
groups,
existingNames,
-}: Props) {
+}: Props) {
+ const { t } = useTranslation();
+
////////////////////////////////////////////////////////////////
// Event Handlers
////////////////////////////////////////////////////////////////
- const handleSubmit = async ({
- values,
- }: SubmitHandlerPlus) => {
- await onSubmit({
- ...values,
- id: nameToId(values.name),
- name: values.name.trim(),
- } as TData);
-
- onClose();
+ const handleSubmit = ({
+ values: { name, group },
+ }: SubmitHandlerPlus) => {
+ return onSubmit({ name: name.trim(), group });
};
////////////////////////////////////////////////////////////////
@@ -55,7 +45,6 @@ function CreateDialog({
open={open}
onCancel={onClose}
onSubmit={handleSubmit}
- config={{ defaultValues }}
>
{({ control }) => (
)}
diff --git a/webapp/src/components/common/GroupedDataTable/DuplicateDialog.tsx b/webapp/src/components/common/GroupedDataTable/DuplicateDialog.tsx
index 93daa1a3bc..fa6de0f9b3 100644
--- a/webapp/src/components/common/GroupedDataTable/DuplicateDialog.tsx
+++ b/webapp/src/components/common/GroupedDataTable/DuplicateDialog.tsx
@@ -1,5 +1,5 @@
import { useTranslation } from "react-i18next";
-import ControlPointDuplicateIcon from "@mui/icons-material/ControlPointDuplicate";
+import ContentCopyIcon from "@mui/icons-material/ContentCopy";
import Fieldset from "../Fieldset";
import FormDialog from "../dialogs/FormDialog";
import { SubmitHandlerPlus } from "../Form/types";
@@ -37,7 +37,7 @@ function DuplicateDialog(props: Props) {
{
+ cell: MRT_Cell;
+}
+
+function BooleanCell({ cell }: Props) {
+ const { t } = useTranslation();
+
+ return (
+
+ );
+}
+
+export default BooleanCell;
diff --git a/webapp/src/components/common/GroupedDataTable/index.tsx b/webapp/src/components/common/GroupedDataTable/index.tsx
index 5bc91534f9..6aed12cc32 100644
--- a/webapp/src/components/common/GroupedDataTable/index.tsx
+++ b/webapp/src/components/common/GroupedDataTable/index.tsx
@@ -1,69 +1,303 @@
import Box from "@mui/material/Box";
import AddCircleOutlineIcon from "@mui/icons-material/AddCircleOutline";
-import ControlPointDuplicateIcon from "@mui/icons-material/ControlPointDuplicate";
+import ContentCopyIcon from "@mui/icons-material/ContentCopy";
import DeleteOutlineIcon from "@mui/icons-material/DeleteOutline";
import DeleteIcon from "@mui/icons-material/Delete";
-import { Button } from "@mui/material";
+import { Button, Skeleton } from "@mui/material";
import {
MaterialReactTable,
MRT_ToggleFiltersButton,
MRT_ToggleGlobalFilterButton,
+ useMaterialReactTable,
type MRT_RowSelectionState,
type MRT_ColumnDef,
} from "material-react-table";
import { useTranslation } from "react-i18next";
-import { useMemo, useState } from "react";
+import { useEffect, useMemo, useRef, useState } from "react";
import CreateDialog from "./CreateDialog";
import ConfirmationDialog from "../dialogs/ConfirmationDialog";
-import { TRow, generateUniqueValue } from "./utils";
+import { generateUniqueValue, getTableOptionsForAlign } from "./utils";
import DuplicateDialog from "./DuplicateDialog";
+import { translateWithColon } from "../../../utils/i18nUtils";
+import useAutoUpdateRef from "../../../hooks/useAutoUpdateRef";
+import * as R from "ramda";
+import * as RA from "ramda-adjunct";
+import { PromiseAny } from "../../../utils/tsUtils";
+import useEnqueueErrorSnackbar from "../../../hooks/useEnqueueErrorSnackbar";
+import { toError } from "../../../utils/fnUtils";
+import useOperationInProgressCount from "../../../hooks/useOperationInProgressCount";
+import type { TRow } from "./types";
-export interface GroupedDataTableProps {
+export interface GroupedDataTableProps<
+ TGroups extends string[],
+ TData extends TRow,
+> {
data: TData[];
- columns: Array>;
- groups: string[] | readonly string[];
- onCreate?: (values: TData) => Promise;
- onDelete?: (ids: string[]) => void;
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ columns: Array>;
+ groups: TGroups;
+ onCreate?: (values: TRow) => Promise;
+ onDuplicate?: (row: TData, newName: string) => Promise;
+ onDelete?: (rows: TData[]) => PromiseAny | void;
+ onNameClick?: (row: TData) => void;
+ onDataChange?: (data: TData[]) => void;
+ isLoading?: boolean;
+ deleteConfirmationMessage?: string | ((count: number) => string);
+ fillPendingRow?: (
+ pendingRow: TRow,
+ ) => TRow & Partial;
}
-function GroupedDataTable({
+// Use ids to identify default columns (instead of `accessorKey`)
+// to guarantee a unique identifier, since a duplicate `accessorKey`
+// is more likely to occur with the `columns` prop.
+const GROUP_COLUMN_ID = "_group";
+const NAME_COLUMN_ID = "_name";
+
+function GroupedDataTable<
+ TGroups extends string[],
+ TData extends TRow,
+>({
data,
columns,
groups,
onCreate,
+ onDuplicate,
onDelete,
-}: GroupedDataTableProps) {
+ onNameClick,
+ onDataChange,
+ isLoading,
+ deleteConfirmationMessage,
+ fillPendingRow,
+}: GroupedDataTableProps) {
const { t } = useTranslation();
const [openDialog, setOpenDialog] = useState<
"add" | "duplicate" | "delete" | ""
>("");
const [tableData, setTableData] = useState(data);
const [rowSelection, setRowSelection] = useState({});
+ const enqueueErrorSnackbar = useEnqueueErrorSnackbar();
+  // Allows using the latest version of `onNameClick` in `tableColumns`
+ const callbacksRef = useAutoUpdateRef({ onNameClick });
+ const pendingRows = useRef>>([]);
+ const { createOps, deleteOps, totalOps } = useOperationInProgressCount();
- const isAnyRowSelected = useMemo(
- () => Object.values(rowSelection).some((value) => value),
- [rowSelection],
- );
+ useEffect(() => setTableData(data), [data]);
- const isOneRowSelected = useMemo(
- () => Object.values(rowSelection).filter((value) => value).length === 1,
- [rowSelection],
- );
-
- const selectedRow = useMemo(() => {
- if (isOneRowSelected) {
- const selectedIndex = Object.keys(rowSelection).find(
- (key) => rowSelection[key],
- );
- return selectedIndex && tableData[+selectedIndex];
- }
- }, [isOneRowSelected, rowSelection, tableData]);
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ useEffect(() => onDataChange?.(tableData), [tableData]);
const existingNames = useMemo(
() => tableData.map((row) => row.name.toLowerCase()),
[tableData],
);
+ const tableColumns = useMemo>>(
+ () => [
+ {
+ accessorKey: "group",
+ header: t("global.group"),
+ id: GROUP_COLUMN_ID,
+ size: 50,
+ filterVariant: "autocomplete",
+ filterSelectOptions: groups,
+ footer: translateWithColon("global.total"),
+ ...getTableOptionsForAlign("left"),
+ },
+ {
+ accessorKey: "name",
+ header: t("global.name"),
+ id: NAME_COLUMN_ID,
+ size: 100,
+ filterVariant: "autocomplete",
+ filterSelectOptions: existingNames,
+ Cell:
+ callbacksRef.current.onNameClick &&
+ (({ renderedCellValue, row }) => {
+ if (isPendingRow(row.original)) {
+ return renderedCellValue;
+ }
+
+ return (
+ callbacksRef.current.onNameClick?.(row.original)}
+ >
+ {renderedCellValue}
+
+ );
+ }),
+ ...getTableOptionsForAlign("left"),
+ },
+ ...columns.map(
+ (column) =>
+ ({
+ ...column,
+ Cell: (props) => {
+ const { row, renderedCellValue } = props;
+            // Use JSX instead of calling it directly to avoid the React warning:
+ // 'Warning: Internal React error: Expected static flag was missing.'
+ const CellComp = column.Cell;
+
+ if (isPendingRow(row.original)) {
+ return (
+
+ );
+ }
+
+ return CellComp ? : renderedCellValue;
+ },
+ }) as MRT_ColumnDef,
+ ),
+ ],
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ [columns, t, ...groups],
+ );
+
+ const table = useMaterialReactTable({
+ data: tableData,
+ columns: tableColumns,
+ initialState: {
+ grouping: [GROUP_COLUMN_ID],
+ density: "compact",
+ expanded: true,
+ columnPinning: { left: [GROUP_COLUMN_ID] },
+ },
+ state: { isLoading, isSaving: totalOps > 0, rowSelection },
+ enableGrouping: true,
+ enableStickyFooter: true,
+ enableStickyHeader: true,
+ enableColumnDragging: false,
+ enableColumnActions: false,
+ enableBottomToolbar: false,
+ enablePagination: false,
+ positionToolbarAlertBanner: "none",
+ // Rows
+ muiTableBodyRowProps: ({ row }) => {
+ const isPending = isPendingRow(row.original);
+
+ return {
+ onClick: () => {
+ if (isPending) {
+ return;
+ }
+
+ const isGrouped = row.getIsGrouped();
+ const rowIds = isGrouped
+ ? row.getLeafRows().map((r) => r.id)
+ : [row.id];
+
+ setRowSelection((prev) => {
+ const newValue = isGrouped
+ ? !rowIds.some((id) => prev[id]) // Select/Deselect all
+ : !prev[row.id];
+
+ return {
+ ...prev,
+ ...rowIds.reduce((acc, id) => ({ ...acc, [id]: newValue }), {}),
+ };
+ });
+ },
+ selected: rowSelection[row.id],
+ sx: { cursor: isPending ? "wait" : "pointer" },
+ };
+ },
+ // Toolbars
+ renderTopToolbarCustomActions: ({ table }) => (
+
+ {onCreate && (
+ }
+ variant="contained"
+ size="small"
+ onClick={() => setOpenDialog("add")}
+ >
+ {t("button.add")}
+
+ )}
+ {onDuplicate && (
+ }
+ variant="outlined"
+ size="small"
+ onClick={() => setOpenDialog("duplicate")}
+ disabled={table.getSelectedRowModel().rows.length !== 1}
+ >
+ {t("global.duplicate")}
+
+ )}
+ {onDelete && (
+ }
+ color="error"
+ variant="outlined"
+ size="small"
+ onClick={() => setOpenDialog("delete")}
+ disabled={table.getSelectedRowModel().rows.length === 0}
+ >
+ {t("global.delete")}
+
+ )}
+
+ ),
+ renderToolbarInternalActions: ({ table }) => (
+ <>
+
+
+ >
+ ),
+ onRowSelectionChange: setRowSelection,
+ // Styles
+ muiTablePaperProps: { sx: { display: "flex", flexDirection: "column" } }, // Allow to have scroll
+ ...R.mergeDeepRight(getTableOptionsForAlign("right"), {
+ muiTableBodyCellProps: {
+ sx: { borderBottom: "1px solid rgba(224, 224, 224, 0.3)" },
+ },
+ }),
+ });
+
+ const selectedRows = table
+ .getSelectedRowModel()
+ .rows.map((row) => row.original);
+ const selectedRow = selectedRows.length === 1 ? selectedRows[0] : null;
+
+ ////////////////////////////////////////////////////////////////
+ // Optimistic
+ ////////////////////////////////////////////////////////////////
+
+ const addPendingRow = (row: TRow) => {
+ const pendingRow = fillPendingRow?.(row) || row;
+
+ pendingRows.current.push(pendingRow);
+
+ // Type can be asserted as `TData` because the row will be checked in cell renders
+ // and `fillPendingRow` allows to add needed data
+ setTableData((prev) => [...prev, pendingRow as TData]);
+
+ return pendingRow;
+ };
+
+ const removePendingRow = (row: TRow) => {
+ if (isPendingRow(row)) {
+ pendingRows.current = pendingRows.current.filter((r) => r !== row);
+ setTableData((prev) => prev.filter((r) => r !== row));
+ }
+ };
+
+ function isPendingRow(row: TRow) {
+ return pendingRows.current.includes(row);
+ }
+
////////////////////////////////////////////////////////////////
// Utils
////////////////////////////////////////////////////////////////
@@ -74,51 +308,80 @@ function GroupedDataTable({
// Event Handlers
////////////////////////////////////////////////////////////////
- const handleCreate = async (values: TData) => {
- if (onCreate) {
- const newRow = await onCreate(values);
- setTableData((prevTableData) => [...prevTableData, newRow]);
- }
- };
+ const handleCreate = async (values: TRow) => {
+ closeDialog();
- const handleDelete = () => {
- if (!onDelete) {
+ if (!onCreate) {
return;
}
- const rowIndexes = Object.keys(rowSelection)
- .map(Number)
- // ignore groups names
- .filter(Number.isInteger);
+ createOps.increment();
+ const pendingRow = addPendingRow(values);
- const rowIdsToDelete = rowIndexes.map((index) => tableData[index].id);
+ try {
+ const newRow = await onCreate(values);
+ setTableData((prev) => [...prev, newRow]);
+ } catch (error) {
+ enqueueErrorSnackbar(t("global.error.create"), toError(error));
+ }
- onDelete(rowIdsToDelete);
- setTableData((prevTableData) =>
- prevTableData.filter((row) => !rowIdsToDelete.includes(row.id)),
- );
- setRowSelection({});
- closeDialog();
+ removePendingRow(pendingRow);
+ createOps.decrement();
};
- const handleDuplicate = async (name: string) => {
- if (!selectedRow) {
+ const handleDuplicate = async (newName: string) => {
+ closeDialog();
+
+ if (!onDuplicate || !selectedRow) {
return;
}
- const id = generateUniqueValue("id", name, tableData);
+ setRowSelection({});
const duplicatedRow = {
...selectedRow,
- id,
- name,
+ name: newName,
};
- if (onCreate) {
- const newRow = await onCreate(duplicatedRow);
- setTableData((prevTableData) => [...prevTableData, newRow]);
- setRowSelection({});
+ createOps.increment();
+ const pendingRow = addPendingRow(duplicatedRow);
+
+ try {
+ const newRow = await onDuplicate(selectedRow, newName);
+ setTableData((prev) => [...prev, newRow]);
+ } catch (error) {
+ enqueueErrorSnackbar(t("global.error.create"), toError(error));
}
+
+ removePendingRow(pendingRow);
+ createOps.decrement();
+ };
+
+ const handleDelete = async () => {
+ closeDialog();
+
+ if (!onDelete) {
+ return;
+ }
+
+ setRowSelection({});
+
+ const rowsToDelete = selectedRows;
+
+ setTableData((prevTableData) =>
+ prevTableData.filter((row) => !rowsToDelete.includes(row)),
+ );
+
+ deleteOps.increment();
+
+ try {
+ await onDelete(rowsToDelete);
+ } catch (error) {
+ enqueueErrorSnackbar(t("global.error.delete"), toError(error));
+ setTableData((prevTableData) => [...prevTableData, ...rowsToDelete]);
+ }
+
+ deleteOps.decrement();
};
////////////////////////////////////////////////////////////////
@@ -127,106 +390,7 @@ function GroupedDataTable({
return (
<>
- {
- const handleRowClick = () => {
- // prevent group rows to be selected
- if (groupingColumnId === undefined) {
- setRowSelection((prev) => ({
- ...prev,
- [id]: !prev[id],
- }));
- }
- };
-
- return {
- onClick: handleRowClick,
- selected: rowSelection[id],
- sx: {
- cursor: "pointer",
- },
- };
- }}
- state={{ rowSelection }}
- enableColumnDragging={false}
- enableColumnActions={false}
- positionToolbarAlertBanner="none"
- enableBottomToolbar={false}
- enableStickyFooter
- enableStickyHeader
- enablePagination={false}
- renderTopToolbarCustomActions={() => (
-
- {onCreate && (
- }
- variant="contained"
- size="small"
- onClick={() => setOpenDialog("add")}
- >
- {t("button.add")}
-
- )}
- }
- variant="outlined"
- size="small"
- onClick={() => setOpenDialog("duplicate")}
- disabled={!isOneRowSelected}
- >
- {t("global.duplicate")}
-
- {onDelete && (
- }
- variant="outlined"
- size="small"
- onClick={() => setOpenDialog("delete")}
- disabled={!isAnyRowSelected}
- >
- {t("global.delete")}
-
- )}
-
- )}
- renderToolbarInternalActions={({ table }) => (
- <>
-
-
- >
- )}
- muiTableHeadCellProps={{
- align: "right",
- }}
- muiTableBodyCellProps={{
- align: "right",
- sx: {
- borderBottom: "1px solid rgba(224, 224, 224, 0.3)",
- },
- }}
- muiTableFooterCellProps={{
- align: "right",
- }}
- muiTablePaperProps={{
- sx: {
- width: 1,
- display: "flex",
- flexDirection: "column",
- overflow: "auto",
- },
- }}
- />
+
{openDialog === "add" && (
({
onClose={closeDialog}
onSubmit={handleDuplicate}
existingNames={existingNames}
- defaultName={generateUniqueValue("name", selectedRow.name, tableData)}
+ defaultName={generateUniqueValue(selectedRow.name, tableData)}
/>
)}
{openDialog === "delete" && (
@@ -254,7 +418,9 @@ function GroupedDataTable({
onConfirm={handleDelete}
alert="warning"
>
- {t("studies.modelization.clusters.question.delete")}
+ {RA.isFunction(deleteConfirmationMessage)
+ ? deleteConfirmationMessage(selectedRows.length)
+ : deleteConfirmationMessage ?? t("dialog.message.confirmDelete")}
)}
>
diff --git a/webapp/src/components/common/GroupedDataTable/types.ts b/webapp/src/components/common/GroupedDataTable/types.ts
new file mode 100644
index 0000000000..6f91852cb4
--- /dev/null
+++ b/webapp/src/components/common/GroupedDataTable/types.ts
@@ -0,0 +1,4 @@
+export interface TRow {
+ name: string;
+ group: T;
+}
diff --git a/webapp/src/components/common/GroupedDataTable/utils.ts b/webapp/src/components/common/GroupedDataTable/utils.ts
index aad96a3784..82673c3877 100644
--- a/webapp/src/components/common/GroupedDataTable/utils.ts
+++ b/webapp/src/components/common/GroupedDataTable/utils.ts
@@ -1,15 +1,6 @@
import * as R from "ramda";
-import { nameToId } from "../../../services/utils";
-
-////////////////////////////////////////////////////////////////
-// Types
-////////////////////////////////////////////////////////////////
-
-export interface TRow {
- id: string;
- name: string;
- group: string;
-}
+import { TableCellProps } from "@mui/material";
+import type { TRow } from "./types";
////////////////////////////////////////////////////////////////
// Functions
@@ -58,24 +49,22 @@ export const generateNextValue = (
*
* This function leverages generateNextValue to ensure the uniqueness of the value.
*
- * @param {"name" | "id"} property - The property for which the unique value is generated.
- * @param {string} originalValue - The original value of the specified property.
- * @param {TRow[]} tableData - The existing table data to check against.
- * @returns {string} A unique value for the specified property.
+ * @param originalValue - The original value of the specified property.
+ * @param tableData - The existing table data to check against for ensuring uniqueness.
+ * @returns A unique value for the specified property.
*/
export const generateUniqueValue = (
- property: "name" | "id",
originalValue: string,
tableData: TRow[],
): string => {
- let baseValue: string;
-
- if (property === "name") {
- baseValue = `${originalValue} - copy`;
- } else {
- baseValue = nameToId(originalValue);
- }
-
- const existingValues = tableData.map((row) => row[property]);
- return generateNextValue(baseValue, existingValues);
+ const existingValues = tableData.map((row) => row.name);
+ return generateNextValue(`${originalValue} - copy`, existingValues);
};
+
+export function getTableOptionsForAlign(align: TableCellProps["align"]) {
+ return {
+ muiTableHeadCellProps: { align },
+ muiTableBodyCellProps: { align },
+ muiTableFooterCellProps: { align },
+ };
+}
diff --git a/webapp/src/hooks/useOperationInProgressCount.ts b/webapp/src/hooks/useOperationInProgressCount.ts
new file mode 100644
index 0000000000..bc71fb677a
--- /dev/null
+++ b/webapp/src/hooks/useOperationInProgressCount.ts
@@ -0,0 +1,51 @@
+import { useMemo, useState } from "react";
+import * as R from "ramda";
+
+/**
+ * Hook to track the number of CRUD operations in progress.
+ *
+ * @returns An object containing methods to increment, decrement,
+ * and retrieve the count of each operation type.
+ */
+function useOperationInProgressCount() {
+ const [opsInProgressCount, setOpsInProgressCount] = useState({
+ create: 0,
+ read: 0,
+ update: 0,
+ delete: 0,
+ });
+
+ const makeOperationMethods = (
+ operation: keyof typeof opsInProgressCount,
+ ) => ({
+ increment: (number = 1) => {
+ setOpsInProgressCount((prev) => ({
+ ...prev,
+ [operation]: prev[operation] + number,
+ }));
+ },
+ decrement: (number = 1) => {
+ setOpsInProgressCount((prev) => ({
+ ...prev,
+ [operation]: Math.max(prev[operation] - number, 0),
+ }));
+ },
+ total: opsInProgressCount[operation],
+ });
+
+ const methods = useMemo(
+ () => ({
+ createOps: makeOperationMethods("create"),
+ readOps: makeOperationMethods("read"),
+ updateOps: makeOperationMethods("update"),
+ deleteOps: makeOperationMethods("delete"),
+ totalOps: Object.values(opsInProgressCount).reduce(R.add, 0),
+ }),
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ [opsInProgressCount],
+ );
+
+ return methods;
+}
+
+export default useOperationInProgressCount;
diff --git a/webapp/src/hooks/useUpdateEffectOnce.ts b/webapp/src/hooks/useUpdateEffectOnce.ts
new file mode 100644
index 0000000000..61fedd115e
--- /dev/null
+++ b/webapp/src/hooks/useUpdateEffectOnce.ts
@@ -0,0 +1,23 @@
+import { useEffect, useRef } from "react";
+import { useUpdateEffect } from "react-use";
+
+/**
+ * Hook that runs the effect only on the first update of the dependencies.
+ * It behaves like the `useEffect` hook, but it skips the initial run
+ * and all runs after the first update.
+ *
+ * @param effect - The effect function to run.
+ * @param deps - An array of dependencies to watch for changes.
+ */
+const useUpdateEffectOnce: typeof useEffect = (effect, deps) => {
+ const hasUpdated = useRef(false);
+
+ useUpdateEffect(() => {
+ if (!hasUpdated.current) {
+ hasUpdated.current = true;
+ return effect();
+ }
+ }, deps);
+};
+
+export default useUpdateEffectOnce;
diff --git a/webapp/src/i18n.ts b/webapp/src/i18n.ts
index d1d95a0574..980cffbf89 100644
--- a/webapp/src/i18n.ts
+++ b/webapp/src/i18n.ts
@@ -2,34 +2,35 @@ import i18n from "i18next";
import Backend from "i18next-http-backend";
import LanguageDetector from "i18next-browser-languagedetector";
import { initReactI18next } from "react-i18next";
+import { version } from "../package.json";
-export default function i18nInit(version = "unknown") {
- i18n
- // load translation using xhr -> see /public/locales
- // learn more: https://github.com/i18next/i18next-xhr-backend
- .use(Backend)
- // detect user language
- // learn more: https://github.com/i18next/i18next-browser-languageDetector
- .use(LanguageDetector)
- // pass the i18n instance to react-i18next.
- .use(initReactI18next)
- // init i18next
- // for all options read: https://www.i18next.com/overview/configuration-options
- .init({
- fallbackLng: "en",
- backend: {
- loadPath: `${
- import.meta.env.BASE_URL
- }locales/{{lng}}/{{ns}}.json?v=${version}`,
- },
- react: {
- useSuspense: false,
- },
- interpolation: {
- escapeValue: false, // not needed for react as it escapes by default
- },
- ns: ["main"],
- defaultNS: "main",
- returnNull: false,
- });
-}
+i18n
+ // load translation using xhr -> see /public/locales
+ // learn more: https://github.com/i18next/i18next-xhr-backend
+ .use(Backend)
+ // detect user language
+ // learn more: https://github.com/i18next/i18next-browser-languageDetector
+ .use(LanguageDetector)
+ // pass the i18n instance to react-i18next.
+ .use(initReactI18next)
+ // init i18next
+ // for all options read: https://www.i18next.com/overview/configuration-options
+ .init({
+ fallbackLng: "en",
+ backend: {
+ loadPath: `${
+ import.meta.env.BASE_URL
+ }locales/{{lng}}/{{ns}}.json?v=${version}`,
+ },
+ react: {
+ useSuspense: false,
+ },
+ interpolation: {
+ escapeValue: false, // not needed for react as it escapes by default
+ },
+ ns: ["main"],
+ defaultNS: "main",
+ returnNull: false,
+ });
+
+export default i18n;
diff --git a/webapp/src/index.tsx b/webapp/src/index.tsx
index 80dec85813..2c6792f7a8 100644
--- a/webapp/src/index.tsx
+++ b/webapp/src/index.tsx
@@ -1,7 +1,6 @@
import { createRoot } from "react-dom/client";
import { Provider } from "react-redux";
import { StyledEngineProvider } from "@mui/material";
-import i18nInit from "./i18n";
import "./index.css";
import App from "./components/App";
import { Config, initConfig } from "./services/config";
@@ -15,8 +14,6 @@ initConfig((config: Config) => {
window.location.reload();
}
- i18nInit(config.version.gitcommit);
-
const container = document.getElementById("root") as HTMLElement;
const root = createRoot(container);
diff --git a/webapp/src/utils/fnUtils.ts b/webapp/src/utils/fnUtils.ts
index d232d83246..155078d711 100644
--- a/webapp/src/utils/fnUtils.ts
+++ b/webapp/src/utils/fnUtils.ts
@@ -4,3 +4,22 @@
export function voidFn(...args: TArgs) {
// Do nothing
}
+
+/**
+ * A utility function that converts an unknown value to an Error object.
+ * If the value is already an Error object, it is returned as is.
+ * If the value is a string, it is used as the message for the new Error object.
+ * If the value is anything else, a new Error object with a generic message is created.
+ *
+ * @param error - The value to convert to an Error object.
+ * @returns An Error object.
+ */
+export function toError(error: unknown) {
+ if (error instanceof Error) {
+ return error;
+ }
+ if (typeof error === "string") {
+ return new Error(error);
+ }
+ return new Error("An unknown error occurred");
+}
diff --git a/webapp/src/utils/i18nUtils.ts b/webapp/src/utils/i18nUtils.ts
new file mode 100644
index 0000000000..c613deab68
--- /dev/null
+++ b/webapp/src/utils/i18nUtils.ts
@@ -0,0 +1,22 @@
+import i18n from "../i18n";
+
+/**
+ * Gets the current language used in the application.
+ *
+ * @returns The current language.
+ */
+export function getCurrentLanguage() {
+ return i18n.language;
+}
+
+/**
+ * Translates the given key and appends a colon (:) at the end
+ * with the appropriate spacing for the current language.
+ *
+ * @param key - The translation key.
+ * @returns The translated string with a colon (:) appended.
+ */
+export function translateWithColon(key: string): string {
+ const lang = i18n.language;
+ return `${i18n.t(key)}${lang.startsWith("fr") ? " " : ""}:`;
+}
diff --git a/webapp/src/utils/tsUtils.ts b/webapp/src/utils/tsUtils.ts
index eb60713aa8..7acf6465a2 100644
--- a/webapp/src/utils/tsUtils.ts
+++ b/webapp/src/utils/tsUtils.ts
@@ -1,3 +1,16 @@
+import { O } from "ts-toolbelt";
+
+/**
+ * Allows using `any` with the `Promise` type without disabling the ESLint rule.
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export type PromiseAny = Promise;
+
+/**
+ * Make all properties in T optional, except for those specified by K.
+ */
+export type PartialExceptFor = O.Required, K>;
+
export function tuple(...items: T): T {
return items;
}
diff --git a/webapp/src/utils/validationUtils.ts b/webapp/src/utils/validationUtils.ts
new file mode 100644
index 0000000000..9af316cbba
--- /dev/null
+++ b/webapp/src/utils/validationUtils.ts
@@ -0,0 +1,182 @@
+import { t } from "i18next";
+
+////////////////////////////////////////////////////////////////
+// Types
+////////////////////////////////////////////////////////////////
+
+interface ValidationOptions {
+ existingValues?: string[];
+ excludedValues?: string[];
+ isCaseSensitive?: boolean;
+ allowSpecialChars?: boolean;
+ specialChars?: string;
+ allowSpaces?: boolean;
+ editedValue?: string;
+ min?: number;
+ max?: number;
+}
+
+////////////////////////////////////////////////////////////////
+// Validators
+////////////////////////////////////////////////////////////////
+
+/**
+ * Validates a single string value against specified criteria.
+ *
+ * Validates the input string against a variety of checks including length restrictions,
+ * character validations, and uniqueness against provided arrays of existing and excluded values.
+ *
+ * @param value - The string to validate. Leading and trailing spaces will be trimmed.
+ * @param options - Configuration options for validation. (Optional)
+ * @param [options.existingValues=[]] - An array of strings to check against for duplicates. Comparison is case-insensitive by default.
+ * @param [options.excludedValues=[]] - An array of strings that the value should not match.
+ * @param [options.isCaseSensitive=false] - Whether the comparison with `existingValues` and `excludedValues` is case-sensitive. Defaults to false.
+ * @param [options.allowSpecialChars=true] - Flags if special characters are permitted in the value.
+ * @param [options.specialChars="&()_-"] - A string representing additional allowed characters outside the typical alphanumeric scope.
+ * @param [options.allowSpaces=true] - Flags if spaces are allowed in the value.
+ * @param [options.editedValue=""] - The current value being edited, to exclude it from duplicate checks.
+ * @param [options.min=0] - Minimum length required for the string. Defaults to 0.
+ * @param [options.max=255] - Maximum allowed length for the string. Defaults to 255.
+ * @returns True if validation is successful, or a localized error message if it fails.
+ */
+export function validateString(
+ value: string,
+ options?: ValidationOptions,
+): string | true {
+ const {
+ existingValues = [],
+ excludedValues = [],
+ isCaseSensitive = false,
+ allowSpecialChars = true,
+ allowSpaces = true,
+ specialChars = "&()_-",
+ editedValue = "",
+ min = 0,
+ max = 255,
+ } = options || {};
+
+ const trimmedValue = value.trim();
+
+ if (!trimmedValue) {
+ return t("form.field.required");
+ }
+
+ if (!allowSpaces && trimmedValue.includes(" ")) {
+ return t("form.field.spacesNotAllowed");
+ }
+
+ if (trimmedValue.length < min) {
+ return t("form.field.minValue", { 0: min });
+ }
+
+ if (trimmedValue.length > max) {
+ return t("form.field.maxValue", { 0: max });
+ }
+
+ // Compiles a regex pattern based on allowed characters and flags.
+ const specialCharsPattern = new RegExp(
+ generatePattern(allowSpaces, allowSpecialChars, specialChars),
+ );
+
+ // Validates the string against the allowed characters regex.
+ if (!specialCharsPattern.test(trimmedValue)) {
+ return specialChars === "" || !allowSpecialChars
+ ? t("form.field.specialCharsNotAllowed")
+ : t("form.field.specialChars", { 0: specialChars });
+ }
+
+ // Normalize the value for comparison, based on case sensitivity option.
+ const normalize = (v: string) =>
+ isCaseSensitive ? v.trim() : v.toLowerCase().trim();
+
+ // Prepare the value for duplicate and exclusion checks.
+ const comparisonValue = normalize(trimmedValue);
+
+  // Some forms require keeping the original value while updating other fields.
+ if (normalize(editedValue) === comparisonValue) {
+ return true;
+ }
+
+ // Check for duplication against existing values.
+ if (existingValues.map(normalize).includes(comparisonValue)) {
+ return t("form.field.duplicate");
+ }
+
+ // Check for inclusion in the list of excluded values.
+ if (excludedValues.map(normalize).includes(comparisonValue)) {
+ return t("form.field.notAllowedValue", { 0: value });
+ }
+
+ return true;
+}
+
+/**
+ * Validates a password string for strong security criteria.
+ *
+ * @param password - The password to validate.
+ * @returns True if validation is successful, or a localized error message if it fails.
+ */
+export function validatePassword(password: string): string | true {
+ const trimmedPassword = password.trim();
+
+ if (!trimmedPassword) {
+ return t("form.field.required");
+ }
+
+ if (trimmedPassword.length < 8) {
+ return t("form.field.minValue", { 0: 8 });
+ }
+
+ if (trimmedPassword.length > 50) {
+ return t("form.field.maxValue", { 0: 50 });
+ }
+
+ if (!/[a-z]/.test(trimmedPassword)) {
+ return t("form.field.requireLowercase");
+ }
+
+ if (!/[A-Z]/.test(trimmedPassword)) {
+ return t("form.field.requireUppercase");
+ }
+
+ if (!/\d/.test(trimmedPassword)) {
+ return t("form.field.requireDigit");
+ }
+
+ if (!/[^\w\s]/.test(trimmedPassword)) {
+ return t("form.field.requireSpecialChars");
+ }
+
+ return true;
+}
+
+////////////////////////////////////////////////////////////////
+// Utils
+////////////////////////////////////////////////////////////////
+
+// Escape special characters in specialChars
+function escapeSpecialChars(chars: string) {
+ return chars.replace(/[-\\^$*+?.()|[\]{}]/g, "\\$&");
+}
+
+/**
+ * Generates a regular expression pattern for string validation based on specified criteria.
+ * This pattern includes considerations for allowing spaces, special characters, and any additional
+ * characters specified in `specialChars`.
+ *
+ * @param allowSpaces - Indicates if spaces are permitted in the string.
+ * @param allowSpecialChars - Indicates if special characters are permitted.
+ * @param specialChars - Specifies additional characters to allow in the string.
+ * @returns The regular expression pattern as a string.
+ */
+function generatePattern(
+ allowSpaces: boolean,
+ allowSpecialChars: boolean,
+ specialChars: string,
+): string {
+ const basePattern = "^[a-zA-Z0-9";
+ const spacePattern = allowSpaces ? " " : "";
+ const specialCharsPattern =
+ allowSpecialChars && specialChars ? escapeSpecialChars(specialChars) : "";
+ return basePattern + spacePattern + specialCharsPattern + "]*$";
+}
From 1a211fde88463131ea158202029af852a7e0cbe1 Mon Sep 17 00:00:00 2001
From: Laurent LAPORTE <43534797+laurent-laporte-pro@users.noreply.github.com>
Date: Fri, 19 Apr 2024 16:12:56 +0200
Subject: [PATCH 16/16] fix(synthesis): prevent 500 error during study
synthesis parsing (#2011)
---
.../filesystem/config/field_validators.py | 77 +++++++++++
.../rawstudy/model/filesystem/config/files.py | 118 ++++++++++-------
.../rawstudy/model/filesystem/config/model.py | 114 +++++++++--------
docs/CHANGELOG.md | 1 +
.../filesystem/config/test_config_files.py | 120 ++++++++++++------
5 files changed, 291 insertions(+), 139 deletions(-)
create mode 100644 antarest/study/storage/rawstudy/model/filesystem/config/field_validators.py
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/field_validators.py b/antarest/study/storage/rawstudy/model/filesystem/config/field_validators.py
new file mode 100644
index 0000000000..74f93f5c46
--- /dev/null
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/field_validators.py
@@ -0,0 +1,77 @@
+import typing as t
+
+_ALL_FILTERING = ["hourly", "daily", "weekly", "monthly", "annual"]
+
+
+def extract_filtering(v: t.Any) -> t.Sequence[str]:
+ """
+ Extract filtering values from a comma-separated list of values.
+ """
+
+ if v is None:
+ values = set()
+ elif isinstance(v, str):
+ values = {x.strip() for x in v.lower().split(",")} if v else set()
+ elif isinstance(v, (list, tuple)):
+ values = set(x.strip().lower() for x in v)
+ else:
+ raise TypeError(f"Invalid type for filtering: {type(v)!r}")
+
+ try:
+ return sorted(values, key=lambda x: _ALL_FILTERING.index(x))
+ except ValueError as e:
+ raise ValueError(f"Invalid value for filtering: {e!s}") from None
+
+
+def validate_filtering(v: t.Any) -> str:
+ """
+ Validate the filtering field and convert it to a comma separated string.
+ """
+
+ return ", ".join(extract_filtering(v))
+
+
+# noinspection SpellCheckingInspection
+def validate_colors(values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]:
+ """
+ Validate ``color_rgb``, ``color_r``, ``color_g``, ``color_b`` and convert them to ``color_rgb``.
+ """
+
+ def _pop_any(dictionary: t.MutableMapping[str, t.Any], *keys: str) -> t.Any:
+ """Save as `pop` but for multiple keys. Return the first found value."""
+        """Same as `pop` but for multiple keys. Return the first found value."""
+
+ color_r = _pop_any(values, "color_r", "colorr")
+ color_g = _pop_any(values, "color_g", "colorg")
+ color_b = _pop_any(values, "color_b", "colorb")
+ if color_r is not None and color_g is not None and color_b is not None:
+ values["color_rgb"] = color_r, color_g, color_b
+ return values
+
+
+def validate_color_rgb(v: t.Any) -> str:
+ """
+ Validate RGB color field and convert it to color code.
+
+ Accepts:
+ - a string in the format "#RRGGBB"
+ - a string in the format "rgb(R, G, B)"
+ - a string in the format "R, G, B"
+ - a list or tuple of 3 integers
+ """
+
+ if isinstance(v, str):
+ if v.startswith("#"):
+ r = int(v[1:3], 16)
+ g = int(v[3:5], 16)
+ b = int(v[5:7], 16)
+ elif v.startswith("rgb("):
+ r, g, b = [int(c) for c in v[4:-1].split(",")]
+ else:
+ r, g, b = [int(c) for c in v.split(",")]
+ elif isinstance(v, (list, tuple)):
+ r, g, b = map(int, v)
+ else:
+ raise TypeError(f"Invalid type for 'color_rgb': {type(v)}")
+
+ return f"#{r:02X}{g:02X}{b:02X}"
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/files.py b/antarest/study/storage/rawstudy/model/filesystem/config/files.py
index 3248b6560a..cafc901644 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/files.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/files.py
@@ -18,6 +18,7 @@
SimulationParsingError,
XpansionParsingError,
)
+from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import extract_filtering
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
Area,
DistrictSet,
@@ -83,6 +84,48 @@ def build(study_path: Path, study_id: str, output_path: t.Optional[Path] = None)
)
+def _extract_text_from_zip(root: Path, posix_path: str) -> t.Sequence[str]:
+ """
+ Extracts text from a file inside a ZIP archive and returns it as a list of lines.
+
+ Args:
+ root: The path to the ZIP archive.
+ posix_path: The relative path to the file inside the ZIP archive.
+
+ Returns:
+ A list of lines in the file. If the file is not found, an empty list is returned.
+ """
+ with zipfile.ZipFile(root) as zf:
+ try:
+ with zf.open(posix_path) as f:
+ text = f.read().decode("utf-8")
+ return text.splitlines(keepends=False)
+ except KeyError:
+ return []
+
+
+def _extract_ini_from_zip(root: Path, posix_path: str, multi_ini_keys: t.Sequence[str] = ()) -> t.Mapping[str, t.Any]:
+ """
+ Extracts data from an INI file inside a ZIP archive and returns it as a dictionary.
+
+ Args:
+ root: The path to the ZIP archive.
+ posix_path: The relative path to the file inside the ZIP archive.
+ multi_ini_keys: List of keys to use for multi INI files.
+
+ Returns:
+ A dictionary of keys/values in the INI file. If the file is not found, an empty dictionary is returned.
+ """
+ reader = IniReader(multi_ini_keys)
+ with zipfile.ZipFile(root) as zf:
+ try:
+ with zf.open(posix_path) as f:
+ buffer = io.StringIO(f.read().decode("utf-8"))
+ return reader.read(buffer)
+ except KeyError:
+ return {}
+
+
def _extract_data_from_file(
root: Path,
inside_root_path: Path,
@@ -110,14 +153,7 @@ def _extract_data_from_file(
if file_type == FileType.TXT:
# Parse the file as a list of lines, return an empty list if missing.
if is_zip_file:
- with zipfile.ZipFile(root) as zf:
- try:
- with zf.open(posix_path) as f:
- text = f.read().decode("utf-8")
- return text.splitlines(keepends=False)
- except KeyError:
- # File not found in the ZIP archive
- return []
+ return _extract_text_from_zip(root, posix_path)
else:
output_data_path = root / inside_root_path
try:
@@ -127,19 +163,12 @@ def _extract_data_from_file(
elif file_type in {FileType.MULTI_INI, FileType.SIMPLE_INI}:
# Parse the file as a dictionary of keys/values, return an empty dictionary if missing.
- reader = IniReader(multi_ini_keys)
if is_zip_file:
- with zipfile.ZipFile(root) as zf:
- try:
- with zf.open(posix_path) as f:
- buffer = io.StringIO(f.read().decode("utf-8"))
- return reader.read(buffer)
- except KeyError:
- # File not found in the ZIP archive
- return {}
+ return _extract_ini_from_zip(root, posix_path, multi_ini_keys=multi_ini_keys)
else:
output_data_path = root / inside_root_path
try:
+ reader = IniReader(multi_ini_keys)
return reader.read(output_data_path)
except FileNotFoundError:
return {}
@@ -294,7 +323,7 @@ def _parse_xpansion_version(path: Path) -> str:
raise XpansionParsingError(xpansion_json, f"key '{exc}' not found in JSON object") from exc
-_regex_eco_adq = re.compile("^([0-9]{8}-[0-9]{4})(eco|adq)-?(.*)")
+_regex_eco_adq = re.compile(r"^(\d{8}-\d{4})(eco|adq)-?(.*)")
match_eco_adq = _regex_eco_adq.match
@@ -359,14 +388,36 @@ def get_playlist(config: JSON) -> t.Optional[t.Dict[int, float]]:
def parse_area(root: Path, area: str) -> "Area":
+ """
+ Parse an area configuration and extract its filtering configuration.
+
+ Args:
+ root: The root directory of the study.
+ area: The name of the area to parse.
+
+ Returns:
+ The area configuration.
+ """
area_id = transform_name_to_id(area)
+
+ # Parse the optimization INI file to extract the filtering configuration.
+ # The file is optional, so we use a default value to avoid a parsing error.
+ optimization = _extract_data_from_file(
+ root=root,
+ inside_root_path=Path(f"input/areas/{area_id}/optimization.ini"),
+ file_type=FileType.SIMPLE_INI,
+ )
+ filtering = optimization.get("filtering", {})
+ filter_synthesis = extract_filtering(filtering.get("filter-synthesis", ""))
+ filter_year_by_year = extract_filtering(filtering.get("filter-year-by-year", ""))
+
return Area(
name=area,
- links=_parse_links(root, area_id),
+ links=_parse_links_filtering(root, area_id),
thermals=_parse_thermal(root, area_id),
renewables=_parse_renewables(root, area_id),
- filters_synthesis=_parse_filters_synthesis(root, area_id),
- filters_year=_parse_filters_year(root, area_id),
+ filters_synthesis=filter_synthesis,
+ filters_year=filter_year_by_year,
st_storages=_parse_st_storage(root, area_id),
)
@@ -444,33 +495,14 @@ def _parse_st_storage(root: Path, area: str) -> t.List[STStorageConfigType]:
return config_list
-def _parse_links(root: Path, area: str) -> t.Dict[str, Link]:
+def _parse_links_filtering(root: Path, area: str) -> t.Dict[str, Link]:
properties_ini = _extract_data_from_file(
root=root,
inside_root_path=Path(f"input/links/{area}/properties.ini"),
file_type=FileType.SIMPLE_INI,
)
- return {link: Link.from_json(properties_ini[link]) for link in list(properties_ini.keys())}
-
-
-def _parse_filters_synthesis(root: Path, area: str) -> t.List[str]:
- optimization = _extract_data_from_file(
- root=root,
- inside_root_path=Path(f"input/areas/{area}/optimization.ini"),
- file_type=FileType.SIMPLE_INI,
- )
- filters: str = optimization["filtering"]["filter-synthesis"]
- return Link.split(filters)
-
-
-def _parse_filters_year(root: Path, area: str) -> t.List[str]:
- optimization = _extract_data_from_file(
- root=root,
- inside_root_path=Path(f"input/areas/{area}/optimization.ini"),
- file_type=FileType.SIMPLE_INI,
- )
- filters: str = optimization["filtering"]["filter-year-by-year"]
- return Link.split(filters)
+ links_by_ids = {link_id: Link(**obj) for link_id, obj in properties_ini.items()}
+ return links_by_ids
def _check_build_on_solver_tests(test_dir: Path) -> None:
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/model.py b/antarest/study/storage/rawstudy/model/filesystem/config/model.py
index 79400d8165..18e9702571 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/model.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/model.py
@@ -1,15 +1,15 @@
import re
+import typing as t
from enum import Enum
from pathlib import Path
-from typing import Dict, List, Optional
-from pydantic import Extra
+from pydantic import Field, root_validator
from pydantic.main import BaseModel
-from antarest.core.model import JSON
from antarest.core.utils.utils import DTO
from .binding_constraint import BindingConstraintDTO
+from .field_validators import extract_filtering
from .renewable import RenewableConfigType
from .st_storage import STStorageConfigType
from .thermal import ThermalConfigType
@@ -20,42 +20,44 @@ class ENR_MODELLING(Enum):
CLUSTERS = "clusters"
-class Link(BaseModel):
+class Link(BaseModel, extra="ignore"):
"""
     Object linked to /input/links/<link>/properties.ini information
- """
- filters_synthesis: List[str]
- filters_year: List[str]
+ Attributes:
+ filters_synthesis: list of filters for synthesis data
+ filters_year: list of filters for year-by-year data
- @staticmethod
- def from_json(properties: JSON) -> "Link":
- return Link(
- filters_year=Link.split(properties["filter-year-by-year"]),
- filters_synthesis=Link.split(properties["filter-synthesis"]),
- )
+ Notes:
+ Ignore extra fields, because we only need `filter-synthesis` and `filter-year-by-year`.
+ """
- @staticmethod
- def split(line: str) -> List[str]:
- return [token.strip() for token in line.split(",") if token.strip() != ""]
+ filters_synthesis: t.List[str] = Field(default_factory=list)
+ filters_year: t.List[str] = Field(default_factory=list)
+
+ @root_validator(pre=True)
+ def validation(cls, values: t.MutableMapping[str, t.Any]) -> t.MutableMapping[str, t.Any]:
+ # note: field names are in kebab-case in the INI file
+ filters_synthesis = values.pop("filter-synthesis", values.pop("filters_synthesis", ""))
+ filters_year = values.pop("filter-year-by-year", values.pop("filters_year", ""))
+ values["filters_synthesis"] = extract_filtering(filters_synthesis)
+ values["filters_year"] = extract_filtering(filters_year)
+ return values
-class Area(BaseModel):
+class Area(BaseModel, extra="forbid"):
"""
     Object linked to /input/<area>/optimization.ini information
"""
- class Config:
- extra = Extra.forbid
-
name: str
- links: Dict[str, Link]
- thermals: List[ThermalConfigType]
- renewables: List[RenewableConfigType]
- filters_synthesis: List[str]
- filters_year: List[str]
+ links: t.Dict[str, Link]
+ thermals: t.List[ThermalConfigType]
+ renewables: t.List[RenewableConfigType]
+ filters_synthesis: t.List[str]
+ filters_year: t.List[str]
# since v8.6
- st_storages: List[STStorageConfigType] = []
+ st_storages: t.List[STStorageConfigType] = []
class DistrictSet(BaseModel):
@@ -64,14 +66,14 @@ class DistrictSet(BaseModel):
"""
ALL = ["hourly", "daily", "weekly", "monthly", "annual"]
- name: Optional[str] = None
+ name: t.Optional[str] = None
inverted_set: bool = False
- areas: Optional[List[str]] = None
+ areas: t.Optional[t.List[str]] = None
output: bool = True
- filters_synthesis: List[str] = ALL
- filters_year: List[str] = ALL
+ filters_synthesis: t.List[str] = ALL
+ filters_year: t.List[str] = ALL
- def get_areas(self, all_areas: List[str]) -> List[str]:
+ def get_areas(self, all_areas: t.List[str]) -> t.List[str]:
if self.inverted_set:
return list(set(all_areas).difference(set(self.areas or [])))
return self.areas or []
@@ -89,7 +91,7 @@ class Simulation(BaseModel):
synthesis: bool
by_year: bool
error: bool
- playlist: Optional[List[int]]
+ playlist: t.Optional[t.List[int]]
archived: bool = False
xpansion: str
@@ -110,16 +112,16 @@ def __init__(
path: Path,
study_id: str,
version: int,
- output_path: Optional[Path] = None,
- areas: Optional[Dict[str, Area]] = None,
- sets: Optional[Dict[str, DistrictSet]] = None,
- outputs: Optional[Dict[str, Simulation]] = None,
- bindings: Optional[List[BindingConstraintDTO]] = None,
+ output_path: t.Optional[Path] = None,
+ areas: t.Optional[t.Dict[str, Area]] = None,
+ sets: t.Optional[t.Dict[str, DistrictSet]] = None,
+ outputs: t.Optional[t.Dict[str, Simulation]] = None,
+ bindings: t.Optional[t.List[BindingConstraintDTO]] = None,
store_new_set: bool = False,
- archive_input_series: Optional[List[str]] = None,
+ archive_input_series: t.Optional[t.List[str]] = None,
enr_modelling: str = ENR_MODELLING.AGGREGATED.value,
- cache: Optional[Dict[str, List[str]]] = None,
- zip_path: Optional[Path] = None,
+ cache: t.Optional[t.Dict[str, t.List[str]]] = None,
+ zip_path: t.Optional[Path] = None,
):
self.study_path = study_path
self.path = path
@@ -138,7 +140,7 @@ def __init__(
def next_file(self, name: str, is_output: bool = False) -> "FileStudyTreeConfig":
if is_output and name in self.outputs and self.outputs[name].archived:
- zip_path: Optional[Path] = self.path / f"{name}.zip"
+ zip_path: t.Optional[Path] = self.path / f"{name}.zip"
else:
zip_path = self.zip_path
@@ -176,43 +178,43 @@ def at_file(self, filepath: Path) -> "FileStudyTreeConfig":
cache=self.cache,
)
- def area_names(self) -> List[str]:
+ def area_names(self) -> t.List[str]:
return self.cache.get("%areas", list(self.areas.keys()))
- def set_names(self, only_output: bool = True) -> List[str]:
+ def set_names(self, only_output: bool = True) -> t.List[str]:
return self.cache.get(
f"%districts%{only_output}",
[k for k, v in self.sets.items() if v.output or not only_output],
)
- def get_thermal_ids(self, area: str) -> List[str]:
+ def get_thermal_ids(self, area: str) -> t.List[str]:
"""
Returns a list of thermal cluster IDs for a given area.
Note that IDs may not be in lower case (but series IDs are).
"""
return self.cache.get(f"%thermal%{area}%{area}", [th.id for th in self.areas[area].thermals])
- def get_renewable_ids(self, area: str) -> List[str]:
+ def get_renewable_ids(self, area: str) -> t.List[str]:
"""
Returns a list of renewable cluster IDs for a given area.
Note that IDs may not be in lower case (but series IDs are).
"""
return self.cache.get(f"%renewable%{area}", [r.id for r in self.areas[area].renewables])
- def get_st_storage_ids(self, area: str) -> List[str]:
+ def get_st_storage_ids(self, area: str) -> t.List[str]:
return self.cache.get(f"%st-storage%{area}", [s.id for s in self.areas[area].st_storages])
- def get_links(self, area: str) -> List[str]:
+ def get_links(self, area: str) -> t.List[str]:
return self.cache.get(f"%links%{area}", list(self.areas[area].links.keys()))
- def get_filters_synthesis(self, area: str, link: Optional[str] = None) -> List[str]:
+ def get_filters_synthesis(self, area: str, link: t.Optional[str] = None) -> t.List[str]:
if link:
return self.areas[area].links[link].filters_synthesis
if area in self.sets and self.sets[area].output:
return self.sets[area].filters_synthesis
return self.areas[area].filters_synthesis
- def get_filters_year(self, area: str, link: Optional[str] = None) -> List[str]:
+ def get_filters_year(self, area: str, link: t.Optional[str] = None) -> t.List[str]:
if link:
return self.areas[area].links[link].filters_year
if area in self.sets and self.sets[area].output:
@@ -245,15 +247,15 @@ class FileStudyTreeConfigDTO(BaseModel):
path: Path
study_id: str
version: int
- output_path: Optional[Path] = None
- areas: Dict[str, Area] = dict()
- sets: Dict[str, DistrictSet] = dict()
- outputs: Dict[str, Simulation] = dict()
- bindings: List[BindingConstraintDTO] = list()
+ output_path: t.Optional[Path] = None
+ areas: t.Dict[str, Area] = dict()
+ sets: t.Dict[str, DistrictSet] = dict()
+ outputs: t.Dict[str, Simulation] = dict()
+ bindings: t.List[BindingConstraintDTO] = list()
store_new_set: bool = False
- archive_input_series: List[str] = list()
+ archive_input_series: t.List[str] = list()
enr_modelling: str = ENR_MODELLING.AGGREGATED.value
- zip_path: Optional[Path] = None
+ zip_path: t.Optional[Path] = None
@staticmethod
def from_build_config(
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index aa70beb064..1f38c7dd7e 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -27,6 +27,7 @@ v2.16.8 (2024-04-19)
* **launcher:** upgrade the project dependencies to use Antares-Launcher v1.3.2
- **ssh:** add retry loop around SSH Exceptions [`#68`](https://github.com/AntaresSimulatorTeam/antares-launcher/pull/68)
- **retriever:** avoid infinite loop when `sbatch` command fails [`#69`](https://github.com/AntaresSimulatorTeam/antares-launcher/pull/69)
+* **synthesis:** prevent 500 error during study synthesis parsing [`#2011`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2011)
v2.16.7 (2024-03-05)
diff --git a/tests/storage/repository/filesystem/config/test_config_files.py b/tests/storage/repository/filesystem/config/test_config_files.py
index a8d8d2fecc..ce29f1a446 100644
--- a/tests/storage/repository/filesystem/config/test_config_files.py
+++ b/tests/storage/repository/filesystem/config/test_config_files.py
@@ -1,4 +1,5 @@
import logging
+import textwrap
from pathlib import Path
from typing import Any, Dict
from zipfile import ZipFile
@@ -10,7 +11,7 @@
BindingConstraintFrequency,
)
from antarest.study.storage.rawstudy.model.filesystem.config.files import (
- _parse_links,
+ _parse_links_filtering,
_parse_renewables,
_parse_sets,
_parse_st_storage,
@@ -31,8 +32,12 @@
from tests.storage.business.assets import ASSETS_DIR
-def build_empty_files(tmp: Path) -> Path:
- study_path = tmp / "my-study"
+@pytest.fixture(name="study_path")
+def study_path_fixture(tmp_path: Path) -> Path:
+ """
+ Create a study directory with the minimal structure required to build the configuration.
+ """
+ study_path = tmp_path / "my-study"
(study_path / "input/bindingconstraints/").mkdir(parents=True)
(study_path / "input/bindingconstraints/bindingconstraints.ini").touch()
@@ -49,31 +54,29 @@ def build_empty_files(tmp: Path) -> Path:
return study_path
-def test_parse_output_parameters(tmp_path: Path) -> None:
- study = build_empty_files(tmp_path)
+def test_parse_output_parameters(study_path: Path) -> None:
content = """
[output]
synthesis = true
storenewset = true
archives =
"""
- (study / "settings/generaldata.ini").write_text(content)
+ (study_path / "settings/generaldata.ini").write_text(content)
config = FileStudyTreeConfig(
- study_path=study,
- path=study,
+ study_path=study_path,
+ path=study_path,
version=-1,
store_new_set=True,
study_id="id",
- output_path=study / "output",
+ output_path=study_path / "output",
)
- assert build(study, "id") == config
+ assert build(study_path, "id") == config
-def test_parse_bindings(tmp_path: Path) -> None:
+def test_parse_bindings(study_path: Path) -> None:
# Setup files
- study_path = build_empty_files(tmp_path)
- content = """
+ content = """\
[bindA]
id = bindA
@@ -81,7 +84,7 @@ def test_parse_bindings(tmp_path: Path) -> None:
id = bindB
type = weekly
"""
- (study_path / "input/bindingconstraints/bindingconstraints.ini").write_text(content)
+ (study_path / "input/bindingconstraints/bindingconstraints.ini").write_text(textwrap.dedent(content))
config = FileStudyTreeConfig(
study_path=study_path,
@@ -108,14 +111,13 @@ def test_parse_bindings(tmp_path: Path) -> None:
assert build(study_path, "id") == config
-def test_parse_outputs(tmp_path: Path) -> None:
- study_path = build_empty_files(tmp_path)
+def test_parse_outputs(study_path: Path) -> None:
output_path = study_path / "output/20201220-1456eco-hello/"
output_path.mkdir(parents=True)
(output_path / "about-the-study").mkdir()
file = output_path / "about-the-study/parameters.ini"
- content = """
+ content = """\
[general]
nbyears = 1
year-by-year = true
@@ -127,7 +129,7 @@ def test_parse_outputs(tmp_path: Path) -> None:
[playlist]
playlist_year + = 0
"""
- file.write_text(content)
+ file.write_text(textwrap.dedent(content))
(output_path / "checkIntegrity.txt").touch()
@@ -226,21 +228,19 @@ def test_parse_outputs__nominal(tmp_path: Path, assets_name: str, expected: Dict
assert actual == expected
-def test_parse_sets(tmp_path: Path) -> None:
- study_path = build_empty_files(tmp_path)
- content = """
-[hello]
-output = true
-+ = a
-+ = b
-"""
- (study_path / "input/areas/sets.ini").write_text(content)
+def test_parse_sets(study_path: Path) -> None:
+ content = """\
+ [hello]
+ output = true
+ + = a
+ + = b
+ """
+ (study_path / "input/areas/sets.ini").write_text(textwrap.dedent(content))
assert _parse_sets(study_path) == {"hello": DistrictSet(areas=["a", "b"], output=True, inverted_set=False)}
-def test_parse_area(tmp_path: Path) -> None:
- study_path = build_empty_files(tmp_path)
+def test_parse_area(study_path: Path) -> None:
(study_path / "input/areas/list.txt").write_text("FR\n")
(study_path / "input/areas/fr").mkdir(parents=True)
content = """
@@ -270,6 +270,51 @@ def test_parse_area(tmp_path: Path) -> None:
assert build(study_path, "id") == config
+def test_parse_area__extra_area(study_path: Path) -> None:
+ """
+ Test the case where an extra area is present in the `list.txt` file.
+
+ The extra area should be taken into account with default values to avoid any parsing error.
+ """
+
+ (study_path / "input/areas/list.txt").write_text("FR\nDE\n")
+ (study_path / "input/areas/fr").mkdir(parents=True)
+ content = """
+ [filtering]
+ filter-synthesis = daily, monthly
+ filter-year-by-year = hourly, weekly, annual
+ """
+ (study_path / "input/areas/fr/optimization.ini").write_text(content)
+
+ config = FileStudyTreeConfig(
+ study_path=study_path,
+ path=study_path,
+ study_id="id",
+ version=-1,
+ output_path=study_path / "output",
+ areas={
+ "fr": Area(
+ name="FR",
+ thermals=[],
+ renewables=[],
+ links={},
+ filters_year=["hourly", "weekly", "annual"],
+ filters_synthesis=["daily", "monthly"],
+ ),
+ "de": Area(
+ name="DE",
+ links={},
+ thermals=[],
+ renewables=[],
+ filters_synthesis=[],
+ filters_year=[],
+ st_storages=[],
+ ),
+ },
+ )
+ assert build(study_path, "id") == config
+
+
# noinspection SpellCheckingInspection
THERMAL_LIST_INI = """\
[t1]
@@ -286,8 +331,7 @@ def test_parse_area(tmp_path: Path) -> None:
"""
-def test_parse_thermal(tmp_path: Path) -> None:
- study_path = build_empty_files(tmp_path)
+def test_parse_thermal(study_path: Path) -> None:
study_path.joinpath("study.antares").write_text("[antares] \n version = 700")
ini_path = study_path.joinpath("input/thermal/clusters/fr/list.ini")
@@ -325,8 +369,7 @@ def test_parse_thermal(tmp_path: Path) -> None:
@pytest.mark.parametrize("version", [850, 860, 870])
-def test_parse_thermal_860(tmp_path: Path, version, caplog) -> None:
- study_path = build_empty_files(tmp_path)
+def test_parse_thermal_860(study_path: Path, version, caplog) -> None:
study_path.joinpath("study.antares").write_text(f"[antares] \n version = {version}")
ini_path = study_path.joinpath("input/thermal/clusters/fr/list.ini")
ini_path.parent.mkdir(parents=True)
@@ -361,8 +404,7 @@ def test_parse_thermal_860(tmp_path: Path, version, caplog) -> None:
"""
-def test_parse_renewables(tmp_path: Path) -> None:
- study_path = build_empty_files(tmp_path)
+def test_parse_renewables(study_path: Path) -> None:
study_path.joinpath("study.antares").write_text("[antares] \n version = 810")
ini_path = study_path.joinpath("input/renewables/clusters/fr/list.ini")
@@ -411,8 +453,7 @@ def test_parse_renewables(tmp_path: Path) -> None:
"""
-def test_parse_st_storage(tmp_path: Path) -> None:
- study_path = build_empty_files(tmp_path)
+def test_parse_st_storage(study_path: Path) -> None:
study_path.joinpath("study.antares").write_text("[antares] \n version = 860")
config_dir = study_path.joinpath("input", "st-storage", "clusters", "fr")
config_dir.mkdir(parents=True)
@@ -452,8 +493,7 @@ def test_parse_st_storage_with_no_file(tmp_path: Path) -> None:
assert _parse_st_storage(tmp_path, "") == []
-def test_parse_links(tmp_path: Path) -> None:
- study_path = build_empty_files(tmp_path)
+def test_parse_links(study_path: Path) -> None:
(study_path / "input/links/fr").mkdir(parents=True)
content = """
[l1]
@@ -463,4 +503,4 @@ def test_parse_links(tmp_path: Path) -> None:
(study_path / "input/links/fr/properties.ini").write_text(content)
link = Link(filters_synthesis=["annual"], filters_year=["hourly"])
- assert _parse_links(study_path, "fr") == {"l1": link}
+ assert _parse_links_filtering(study_path, "fr") == {"l1": link}