From 61d9fc868e61aa2ea8468e0d163a6e7b7b47d1dc Mon Sep 17 00:00:00 2001 From: MartinBelthle <102529366+martinbelthle@users.noreply.github.com> Date: Sat, 9 Mar 2024 15:49:42 +0100 Subject: [PATCH] fix(st-storages): use command when updating matrices (#1971) Resolves [ANT-1352] (cherry picked from commit b3f654a658a2309c80512ab294ad724875f4c126) --- antarest/core/exceptions.py | 24 ++- .../business/areas/st_storage_management.py | 63 +++++-- docs/CHANGELOG.md | 1 + .../study_data_blueprint/test_st_storage.py | 169 ++++++++++-------- .../areas/test_st_storage_management.py | 27 ++- 5 files changed, 188 insertions(+), 96 deletions(-) diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py index 4d6c1a2f5f..1755d001df 100644 --- a/antarest/core/exceptions.py +++ b/antarest/core/exceptions.py @@ -34,13 +34,35 @@ class STStorageConfigNotFoundError(HTTPException): """Configuration for short-term storage is not found""" def __init__(self, study_id: str, area_id: str) -> None: - detail = f"The short-term storage configuration of area '{area_id}' not found:" + detail = f"The short-term storage configuration of area '{area_id}' not found" super().__init__(HTTPStatus.NOT_FOUND, detail) def __str__(self) -> str: return self.detail +class STStorageNotFoundError(HTTPException): + """Short-term storage is not found""" + + def __init__(self, study_id: str, area_id: str, st_storage_id: str) -> None: + detail = f"Short-term storage '{st_storage_id}' not found in area '{area_id}'" + super().__init__(HTTPStatus.NOT_FOUND, detail) + + def __str__(self) -> str: + return self.detail + + +class DuplicateSTStorageId(HTTPException): + """Exception raised when trying to create a short-term storage with an already existing id.""" + + def __init__(self, study_id: str, area_id: str, st_storage_id: str) -> None: + detail = f"Short term storage '{st_storage_id}' already exists in area '{area_id}'" + super().__init__(HTTPStatus.CONFLICT, detail) + + def __str__(self) -> str: + return self.detail + + class UnknownModuleError(Exception): def __init__(self, message: str) -> None: super(UnknownModuleError, self).__init__(message) diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py index ca498c030a..7109d8c668 100644 --- a/antarest/study/business/areas/st_storage_management.py +++ b/antarest/study/business/areas/st_storage_management.py @@ -8,10 +8,13 @@ from typing_extensions import Literal from antarest.core.exceptions import ( + AreaNotFound, ClusterAlreadyExists, + DuplicateSTStorageId, STStorageConfigNotFoundError, STStorageFieldsNotFoundError, STStorageMatrixNotFoundError, + STStorageNotFoundError, ) from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands from antarest.study.model import Study @@ -262,6 +265,7 @@ def create_storage( """ file_study = self._get_file_study(study) storage = form.to_config(study.version) + _check_creation_feasibility(file_study, area_id, storage.id) command = self._make_create_cluster_cmd(area_id, storage) execute_or_add_commands( study, @@ -357,18 +361,11 @@ def update_storage( """ study_version = study.version - # review: reading the configuration poses a problem for variants, - # because it requires generating a snapshot, which takes time. - # This reading could be avoided if we don't need the previous values - # (no cross-field validation, no default values, etc.). - # In return, we won't be able to return a complete `STStorageOutput` object. 
- # So, we need to make sure the frontend doesn't need the missing fields. - # This missing information could also be a problem for the API users. - # The solution would be to avoid reading the configuration if the study is a variant - # (we then use the default values), otherwise, for a RAW study, we read the configuration - # and update the modified values. + # For variants, this method requires generating a snapshot, which takes time. + # But sadly, there's no other way to prevent creating wrong commands. file_study = self._get_file_study(study) + _check_update_feasibility(file_study, area_id, storage_id) path = STORAGE_LIST_PATH.format(area_id=area_id, storage_id=storage_id) try: @@ -415,6 +412,9 @@ def delete_storages( area_id: The area ID of the short-term storage. storage_ids: IDs list of short-term storages to remove. """ + file_study = self._get_file_study(study) + _check_deletion_feasibility(file_study, area_id, storage_ids) + command_context = self.storage_service.variant_study_service.command_factory.command_context for storage_id in storage_ids: command = RemoveSTStorage( @@ -422,7 +422,6 @@ def delete_storages( storage_id=storage_id, command_context=command_context, ) - file_study = self._get_file_study(study) execute_or_add_commands(study, file_study, [command], self.storage_service) def duplicate_cluster(self, study: Study, area_id: str, source_id: str, new_cluster_name: str) -> STStorageOutput: @@ -455,6 +454,7 @@ def duplicate_cluster(self, study: Study, area_id: str, source_id: str, new_clus # Matrix edition lower_source_id = source_id.lower() + # noinspection SpellCheckingInspection ts_names = ["pmax_injection", "pmax_withdrawal", "lower_rule_curve", "upper_rule_curve", "inflows"] source_paths = [ STORAGE_SERIES_PATH.format(area_id=area_id, storage_id=lower_source_id, ts_name=ts_name) @@ -533,8 +533,7 @@ def update_matrix( ts_name: Name of the time series to update. ts: Matrix of the time series to update. 
""" - matrix_object = ts.dict() - self._save_matrix_obj(study, area_id, storage_id, ts_name, matrix_object) + self._save_matrix_obj(study, area_id, storage_id, ts_name, ts.data) def _save_matrix_obj( self, @@ -542,13 +541,13 @@ def _save_matrix_obj( area_id: str, storage_id: str, ts_name: STStorageTimeSeries, - matrix_obj: t.Dict[str, t.Any], + matrix_data: t.List[t.List[float]], ) -> None: - path = STORAGE_SERIES_PATH.format(area_id=area_id, storage_id=storage_id, ts_name=ts_name) - matrix = matrix_obj["data"] + file_study = self._get_file_study(study) command_context = self.storage_service.variant_study_service.command_factory.command_context - command = ReplaceMatrix(target=path, matrix=matrix, command_context=command_context) - execute_or_add_commands(study, self._get_file_study(study), [command], self.storage_service) + path = STORAGE_SERIES_PATH.format(area_id=area_id, storage_id=storage_id, ts_name=ts_name) + command = ReplaceMatrix(target=path, matrix=matrix_data, command_context=command_context) + execute_or_add_commands(study, file_study, [command], self.storage_service) def validate_matrices( self, @@ -593,3 +592,31 @@ def validate_matrices( # Validation successful return True + + +def _get_existing_storage_ids(file_study: FileStudy, area_id: str) -> t.Set[str]: + try: + area = file_study.config.areas[area_id] + except KeyError: + raise AreaNotFound(area_id) from None + else: + return {s.id for s in area.st_storages} + + +def _check_deletion_feasibility(file_study: FileStudy, area_id: str, storage_ids: t.Sequence[str]) -> None: + existing_ids = _get_existing_storage_ids(file_study, area_id) + for storage_id in storage_ids: + if storage_id not in existing_ids: + raise STStorageNotFoundError(file_study.config.study_id, area_id, storage_id) + + +def _check_update_feasibility(file_study: FileStudy, area_id: str, storage_id: str) -> None: + existing_ids = _get_existing_storage_ids(file_study, area_id) + if storage_id not in existing_ids: + raise STStorageNotFoundError(file_study.config.study_id, area_id, storage_id) + + +def _check_creation_feasibility(file_study: FileStudy, area_id: str, storage_id: str) -> None: + existing_ids = _get_existing_storage_ids(file_study, area_id) + if storage_id in existing_ids: + raise DuplicateSTStorageId(file_study.config.study_id, area_id, storage_id) diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 41a9028209..3a8fb03cd3 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -12,6 +12,7 @@ v2.16.8 (2024-04-19) * **st-storages (ui):** correction of incorrect wording between "withdrawal" and "injection" [`#1977`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1977) * **st-storages (ui):** change matrix titles [`#1994`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1994) +* **st-storages:** use command when updating matrices [`#1971`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/1971) v2.16.7 (2024-03-05) -------------------- diff --git a/tests/integration/study_data_blueprint/test_st_storage.py b/tests/integration/study_data_blueprint/test_st_storage.py index 5f2421d911..161e6417b8 100644 --- a/tests/integration/study_data_blueprint/test_st_storage.py +++ b/tests/integration/study_data_blueprint/test_st_storage.py @@ -29,11 +29,9 @@ class TestSTStorage: which contains the following areas: ["de", "es", "fr", "it"]. 
""" + @pytest.mark.parametrize("study_type", ["raw", "variant"]) def test_lifecycle__nominal( - self, - client: TestClient, - user_access_token: str, - study_id: str, + self, client: TestClient, user_access_token: str, study_id: str, study_type: str ) -> None: """ The purpose of this integration test is to test the endpoints @@ -59,10 +57,15 @@ def test_lifecycle__nominal( We will test the deletion of short-term storages. """ + # ============================= + # SET UP + # ============================= + user_headers = {"Authorization": f"Bearer {user_access_token}"} + # Upgrade study to version 860 res = client.put( f"/v1/studies/{study_id}/upgrade", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, params={"target_version": 860}, ) res.raise_for_status() @@ -70,6 +73,25 @@ def test_lifecycle__nominal( task = wait_task_completion(client, user_access_token, task_id) assert task.status == TaskStatus.COMPLETED, task + # Copies the study, to convert it into a managed one. + res = client.post( + f"/v1/studies/{study_id}/copy", + headers={"Authorization": f"Bearer {user_access_token}"}, + params={"dest": "default", "with_outputs": False, "use_task": False}, # type: ignore + ) + assert res.status_code == 201, res.json() + study_id = res.json() + + if study_type == "variant": + # Create Variant + res = client.post( + f"/v1/studies/{study_id}/variants", + headers=user_headers, + params={"name": "Variant 1"}, + ) + assert res.status_code in {200, 201}, res.json() + study_id = res.json() + # ============================= # SHORT-TERM STORAGE CREATION # ============================= @@ -85,7 +107,7 @@ def test_lifecycle__nominal( for attempt in attempts: res = client.post( f"/v1/studies/{study_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json=attempt, ) assert res.status_code == 422, res.json() @@ -102,7 +124,7 @@ def test_lifecycle__nominal( } res = client.post( f"/v1/studies/{study_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json=siemens_properties, ) assert res.status_code == 200, res.json() @@ -114,7 +136,7 @@ def test_lifecycle__nominal( # reading the properties of a short-term storage res = client.get( f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, ) assert res.status_code == 200, res.json() assert res.json() == siemens_config @@ -128,7 +150,7 @@ def test_lifecycle__nominal( array_list = array.tolist() res = client.put( f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}/series/inflows", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json={ "index": list(range(array.shape[0])), "columns": list(range(array.shape[1])), @@ -141,7 +163,7 @@ def test_lifecycle__nominal( # reading the matrix of a short-term storage res = client.get( f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}/series/inflows", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, ) assert res.status_code == 200, res.json() matrix = res.json() @@ -151,7 +173,7 @@ def test_lifecycle__nominal( # validating the matrices of a short-term storage res = client.get( f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}/validate", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, ) assert 
res.status_code == 200, res.json() assert res.json() is True @@ -163,7 +185,7 @@ def test_lifecycle__nominal( # Reading the list of short-term storages res = client.get( f"/v1/studies/{study_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, ) assert res.status_code == 200, res.json() assert res.json() == [siemens_config] @@ -171,7 +193,7 @@ def test_lifecycle__nominal( # updating properties res = client.patch( f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json={ "name": "New Siemens Battery", "reservoirCapacity": 2500, @@ -187,7 +209,7 @@ def test_lifecycle__nominal( res = client.get( f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, ) assert res.status_code == 200, res.json() assert res.json() == siemens_config @@ -199,7 +221,7 @@ def test_lifecycle__nominal( # updating properties res = client.patch( f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json={ "initialLevel": 0.59, "reservoirCapacity": 0, @@ -219,7 +241,7 @@ def test_lifecycle__nominal( bad_properties = {"efficiency": 2.0} res = client.patch( f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json=bad_properties, ) assert res.status_code == 422, res.json() @@ -228,7 +250,7 @@ def test_lifecycle__nominal( # The short-term storage properties should not have been updated. res = client.get( f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, ) assert res.status_code == 200, res.json() assert res.json() == siemens_config @@ -267,7 +289,7 @@ def test_lifecycle__nominal( res = client.request( "DELETE", f"/v1/studies/{study_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json=[siemens_battery_id], ) assert res.status_code == 204, res.json() @@ -277,7 +299,7 @@ def test_lifecycle__nominal( res = client.request( "DELETE", f"/v1/studies/{study_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json=[], ) assert res.status_code == 204, res.json() @@ -297,7 +319,7 @@ def test_lifecycle__nominal( } res = client.post( f"/v1/studies/{study_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json=siemens_properties, ) assert res.status_code == 200, res.json() @@ -316,7 +338,7 @@ def test_lifecycle__nominal( } res = client.post( f"/v1/studies/{study_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json=grand_maison_properties, ) assert res.status_code == 200, res.json() @@ -326,7 +348,7 @@ def test_lifecycle__nominal( # Reading the list of short-term storages res = client.get( f"/v1/studies/{study_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, ) assert res.status_code == 200, res.json() siemens_config = {**DEFAULT_PROPERTIES, **siemens_properties, "id": siemens_battery_id} @@ -337,7 +359,7 @@ def test_lifecycle__nominal( res = 
client.request( "DELETE", f"/v1/studies/{study_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json=[grand_maison_id, duplicated_config["id"]], ) assert res.status_code == 204, res.json() @@ -346,7 +368,7 @@ def test_lifecycle__nominal( # Only one st-storage should remain. res = client.get( f"/v1/studies/{study_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, ) assert res.status_code == 200, res.json() assert len(res.json()) == 1 @@ -360,25 +382,21 @@ def test_lifecycle__nominal( res = client.request( "DELETE", f"/v1/studies/{study_id}/areas/{bad_area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json=[siemens_battery_id], ) - assert res.status_code == 500, res.json() + assert res.status_code == 404 obj = res.json() - description = obj["description"] - assert bad_area_id in description - assert re.search( - r"CommandName.REMOVE_ST_STORAGE", - description, - flags=re.IGNORECASE, - ) + + assert obj["description"] == f"Area is not found: '{bad_area_id}'" + assert obj["exception"] == "AreaNotFound" # Check delete with the wrong value of `study_id` bad_study_id = "bad_study" res = client.request( "DELETE", f"/v1/studies/{bad_study_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json=[siemens_battery_id], ) obj = res.json() @@ -389,7 +407,7 @@ def test_lifecycle__nominal( # Check get with wrong `area_id` res = client.get( f"/v1/studies/{study_id}/areas/{bad_area_id}/storages/{siemens_battery_id}", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, ) obj = res.json() description = obj["description"] @@ -399,7 +417,7 @@ def test_lifecycle__nominal( # Check get with wrong `study_id` res = client.get( f"/v1/studies/{bad_study_id}/areas/{area_id}/storages/{siemens_battery_id}", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, ) obj = res.json() description = obj["description"] @@ -409,7 +427,7 @@ def test_lifecycle__nominal( # Check POST with wrong `study_id` res = client.post( f"/v1/studies/{bad_study_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json={"name": siemens_battery, "group": "Battery"}, ) obj = res.json() @@ -420,20 +438,18 @@ def test_lifecycle__nominal( # Check POST with wrong `area_id` res = client.post( f"/v1/studies/{study_id}/areas/{bad_area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json={"name": siemens_battery, "group": "Battery"}, ) - assert res.status_code == 500, res.json() + assert res.status_code == 404 obj = res.json() - description = obj["description"] - assert bad_area_id in description - assert re.search(r"Area ", description, flags=re.IGNORECASE) - assert re.search(r"does not exist ", description, flags=re.IGNORECASE) + assert obj["description"] == f"Area is not found: '{bad_area_id}'" + assert obj["exception"] == "AreaNotFound" # Check POST with wrong `group` res = client.post( f"/v1/studies/{study_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json={"name": siemens_battery, "group": "GroupFoo"}, ) assert res.status_code == 422, res.json() @@ -444,33 +460,30 @@ def test_lifecycle__nominal( # Check PATCH with the wrong `area_id` res = client.patch( 
f"/v1/studies/{study_id}/areas/{bad_area_id}/storages/{siemens_battery_id}", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json={"efficiency": 1.0}, ) - assert res.status_code == 404, res.json() + assert res.status_code == 404 obj = res.json() - description = obj["description"] - assert bad_area_id in description - assert re.search(r"not a child of ", description, flags=re.IGNORECASE) + assert obj["description"] == f"Area is not found: '{bad_area_id}'" + assert obj["exception"] == "AreaNotFound" # Check PATCH with the wrong `storage_id` bad_storage_id = "bad_storage" res = client.patch( f"/v1/studies/{study_id}/areas/{area_id}/storages/{bad_storage_id}", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json={"efficiency": 1.0}, ) - assert res.status_code == 404, res.json() + assert res.status_code == 404 obj = res.json() - description = obj["description"] - assert bad_storage_id in description - assert re.search(r"fields of storage", description, flags=re.IGNORECASE) - assert re.search(r"not found", description, flags=re.IGNORECASE) + assert obj["description"] == f"Short-term storage '{bad_storage_id}' not found in area '{area_id}'" + assert obj["exception"] == "STStorageNotFoundError" # Check PATCH with the wrong `study_id` res = client.patch( f"/v1/studies/{bad_study_id}/areas/{area_id}/storages/{siemens_battery_id}", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json={"efficiency": 1.0}, ) assert res.status_code == 404, res.json() @@ -478,7 +491,7 @@ def test_lifecycle__nominal( description = obj["description"] assert bad_study_id in description - # Cannot duplicate a fake st-storage + # Cannot duplicate a unknown st-storage unknown_id = "unknown" res = client.post( f"/v1/studies/{study_id}/areas/{area_id}/storages/{unknown_id}", @@ -502,11 +515,8 @@ def test_lifecycle__nominal( assert siemens_battery.lower() in description assert obj["exception"] == "ClusterAlreadyExists" - def test__default_values( - self, - client: TestClient, - user_access_token: str, - ) -> None: + @pytest.mark.parametrize("study_type", ["raw", "variant"]) + def test__default_values(self, client: TestClient, user_access_token: str, study_type: str) -> None: """ The purpose of this integration test is to test the default values of the properties of a short-term storage. @@ -516,18 +526,29 @@ def test__default_values( Then the short-term storage is created with initialLevel = 0.0, and initialLevelOptim = False. 
""" # Create a new study in version 860 (or higher) + user_headers = {"Authorization": f"Bearer {user_access_token}"} res = client.post( "/v1/studies", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, params={"name": "MyStudy", "version": 860}, ) assert res.status_code in {200, 201}, res.json() study_id = res.json() + if study_type == "variant": + # Create Variant + res = client.post( + f"/v1/studies/{study_id}/variants", + headers=user_headers, + params={"name": "Variant 1"}, + ) + assert res.status_code in {200, 201}, res.json() + study_id = res.json() + # Create a new area named "FR" res = client.post( f"/v1/studies/{study_id}/areas", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json={"name": "FR", "type": "AREA"}, ) assert res.status_code in {200, 201}, res.json() @@ -537,7 +558,7 @@ def test__default_values( tesla_battery = "Tesla Battery" res = client.post( f"/v1/studies/{study_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json={"name": tesla_battery, "group": "Battery"}, ) assert res.status_code == 200, res.json() @@ -549,7 +570,7 @@ def test__default_values( # are properly set in the configuration file. res = client.get( f"/v1/studies/{study_id}/raw", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, params={"path": f"input/st-storage/clusters/{area_id}/list/{tesla_battery_id}"}, ) assert res.status_code == 200, res.json() @@ -564,7 +585,7 @@ def test__default_values( # Create a variant of the study res = client.post( f"/v1/studies/{study_id}/variants", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, params={"name": "MyVariant"}, ) assert res.status_code in {200, 201}, res.json() @@ -574,7 +595,7 @@ def test__default_values( siemens_battery = "Siemens Battery" res = client.post( f"/v1/studies/{variant_id}/areas/{area_id}/storages", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json={"name": siemens_battery, "group": "Battery"}, ) assert res.status_code == 200, res.json() @@ -582,7 +603,7 @@ def test__default_values( # Check the variant commands res = client.get( f"/v1/studies/{variant_id}/commands", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, ) assert res.status_code == 200, res.json() commands = res.json() @@ -608,7 +629,7 @@ def test__default_values( siemens_battery_id = transform_name_to_id(siemens_battery) res = client.patch( f"/v1/studies/{variant_id}/areas/{area_id}/storages/{siemens_battery_id}", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json={"initialLevel": 0.5}, ) assert res.status_code == 200, res.json() @@ -616,7 +637,7 @@ def test__default_values( # Check the variant commands res = client.get( f"/v1/studies/{variant_id}/commands", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, ) assert res.status_code == 200, res.json() commands = res.json() @@ -636,7 +657,7 @@ def test__default_values( # Update the initialLevel property of the "Siemens Battery" short-term storage back to 0 res = client.patch( f"/v1/studies/{variant_id}/areas/{area_id}/storages/{siemens_battery_id}", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, json={"initialLevel": 0.0, "injectionNominalCapacity": 1600}, ) assert res.status_code == 200, res.json() @@ -644,7 +665,7 @@ def test__default_values( # Check 
the variant commands res = client.get( f"/v1/studies/{variant_id}/commands", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, ) assert res.status_code == 200, res.json() commands = res.json() @@ -671,7 +692,7 @@ def test__default_values( # are properly set in the configuration file. res = client.get( f"/v1/studies/{variant_id}/raw", - headers={"Authorization": f"Bearer {user_access_token}"}, + headers=user_headers, params={"path": f"input/st-storage/clusters/{area_id}/list/{siemens_battery_id}"}, ) assert res.status_code == 200, res.json() diff --git a/tests/study/business/areas/test_st_storage_management.py b/tests/study/business/areas/test_st_storage_management.py index 646dc26c78..5c3e7e660c 100644 --- a/tests/study/business/areas/test_st_storage_management.py +++ b/tests/study/business/areas/test_st_storage_management.py @@ -11,16 +11,19 @@ from sqlalchemy.orm.session import Session # type: ignore from antarest.core.exceptions import ( + AreaNotFound, STStorageConfigNotFoundError, STStorageFieldsNotFoundError, STStorageMatrixNotFoundError, + STStorageNotFoundError, ) from antarest.core.model import PublicMode from antarest.login.model import Group, User from antarest.study.business.areas.st_storage_management import STStorageInput, STStorageManager from antarest.study.model import RawStudy, Study, StudyContentStatus from antarest.study.storage.rawstudy.ini_reader import IniReader -from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import STStorageGroup +from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, FileStudyTreeConfig +from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import STStorageConfig, STStorageGroup from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode from antarest.study.storage.rawstudy.model.filesystem.root.filestudytree import FileStudyTree @@ -287,11 +290,29 @@ def test_update_storage__nominal_case( get_node=Mock(return_value=ini_file_node), ) + area = Mock(spec=Area) + mock_config = Mock(spec=FileStudyTreeConfig, study_id=study.id) + file_study.config = mock_config + # Given the following arguments manager = STStorageManager(study_storage_service) - - # Run the method being tested edit_form = STStorageInput(initial_level=0, initial_level_optim=False) + + # Test behavior for area not in study + mock_config.areas = {"fake_area": area} + with pytest.raises(AreaNotFound) as ctx: + manager.update_storage(study, area_id="West", storage_id="storage1", form=edit_form) + assert ctx.value.detail == "Area is not found: 'West'" + + # Test behavior for st_storage not in study + mock_config.areas = {"West": area} + area.st_storages = [STStorageConfig(name="fake_name", group="battery")] + with pytest.raises(STStorageNotFoundError) as ctx: + manager.update_storage(study, area_id="West", storage_id="storage1", form=edit_form) + assert ctx.value.detail == "Short-term storage 'storage1' not found in area 'West'" + + # Test behavior for nominal case + area.st_storages = [STStorageConfig(name="storage1", group="battery")] manager.update_storage(study, area_id="West", storage_id="storage1", form=edit_form) # Assert that the storage fields have been updated
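
Illustrative usage note (not part of the patch): a minimal sketch of how a client could exercise the behavior this change introduces, assuming a running AntaREST API at http://localhost:8080, a valid bearer token, and an existing managed study, area, and short-term storage. The base URL, token, and identifiers below are placeholders; only the endpoint paths and error shapes exercised in the tests above are taken from the patch itself.

    # Sketch only: placeholders marked below, error codes follow the tests in this patch.
    import requests

    BASE = "http://localhost:8080"  # assumed local API endpoint (placeholder)
    HEADERS = {"Authorization": "Bearer <access_token>"}  # placeholder token
    study_id, area_id, storage_id = "<study_id>", "fr", "siemens battery"  # placeholders

    # Updating a time series now goes through a ReplaceMatrix command, so the same
    # call works for raw studies and variants (for a variant, the command is recorded
    # instead of requiring a generated snapshot to be edited directly).
    matrix = {
        "index": list(range(8760)),
        "columns": [0],
        "data": [[0.0]] * 8760,
    }
    res = requests.put(
        f"{BASE}/v1/studies/{study_id}/areas/{area_id}/storages/{storage_id}/series/inflows",
        headers=HEADERS,
        json=matrix,
    )
    res.raise_for_status()

    # The new feasibility checks surface explicit HTTP errors instead of failing
    # later during command execution:
    res = requests.patch(
        f"{BASE}/v1/studies/{study_id}/areas/{area_id}/storages/unknown_storage",
        headers=HEADERS,
        json={"efficiency": 1.0},
    )
    assert res.status_code == 404  # STStorageNotFoundError: unknown storage in the area

    res = requests.post(
        f"{BASE}/v1/studies/{study_id}/areas/unknown_area/storages",
        headers=HEADERS,
        json={"name": "Some Battery", "group": "Battery"},
    )
    assert res.status_code == 404  # AreaNotFound: unknown area

Creating a storage whose id already exists in the area is expected to return a 409 Conflict (DuplicateSTStorageId), per the new exception added in antarest/core/exceptions.py.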