Skip to content

Commit

Permalink
feat(table-mode): implement table mode update (WIP)
Browse files Browse the repository at this point in the history
  • Loading branch information
laurent-laporte-pro committed Apr 3, 2024
1 parent afbf46e commit 64a6fa0
Show file tree
Hide file tree
Showing 19 changed files with 815 additions and 887 deletions.
49 changes: 30 additions & 19 deletions antarest/study/business/area_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from pydantic import BaseModel, Extra, Field

from antarest.core.exceptions import ConfigFileNotFound, DuplicateAreaName, LayerNotAllowedToBeDeleted, LayerNotFound
from antarest.core.model import JSON
from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands
from antarest.study.model import Patch, PatchArea, PatchCluster, RawStudy, Study
from antarest.study.repository import StudyMetadataRepository
Expand All @@ -16,6 +17,7 @@
AreaUI,
OptimizationProperties,
ThermalAreasProperties,
UIProperties,
)
from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, DistrictSet, transform_name_to_id
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
Expand All @@ -40,6 +42,7 @@ class AreaCreationDTO(BaseModel):
set: t.Optional[t.List[str]]


# review: is this class necessary?
class ClusterInfoDTO(PatchCluster):
id: str
name: str
Expand Down Expand Up @@ -85,14 +88,20 @@ def _get_ui_info_map(file_study: FileStudy, area_ids: t.Sequence[str]) -> t.Dict
# instead of raising an obscure exception.
if not area_ids:
return {}

ui_info_map = file_study.tree.get(["input", "areas", ",".join(area_ids), "ui"])

# If there is only one ID in the `area_ids`, the result returned from
# the `file_study.tree.get` call will be a single UI object.
# On the other hand, if there are multiple values in `area_ids`,
# the result will be a dictionary where the keys are the IDs,
# and the values are the corresponding UI objects.
if len(area_ids) == 1:
ui_info_map = {area_ids[0]: ui_info_map}

# Convert to UIProperties to ensure that the UI object is valid.
ui_info_map = {area_id: UIProperties(**ui_info).to_config() for area_id, ui_info in ui_info_map.items()}

return ui_info_map


Expand Down Expand Up @@ -133,7 +142,7 @@ class _BaseAreaDTO(

# noinspection SpellCheckingInspection
@camel_case_model
class GetAreaDTO(_BaseAreaDTO, metaclass=AllOptionalMetaclass):
class AreaOutput(_BaseAreaDTO, metaclass=AllOptionalMetaclass):
"""
DTO object use to get the area information using a flat structure.
"""
Expand All @@ -145,7 +154,7 @@ def create_area_dto(
*,
average_unsupplied_energy_cost: float,
average_spilled_energy_cost: float,
) -> "GetAreaDTO":
) -> "AreaOutput":
"""
Creates a `GetAreaDTO` object from configuration data.
Expand Down Expand Up @@ -194,7 +203,7 @@ def __init__(
self.patch_service = PatchService(repository=repository)

# noinspection SpellCheckingInspection
def get_all_area_props(self, study: RawStudy) -> t.Mapping[str, GetAreaDTO]:
def get_all_area_props(self, study: RawStudy) -> t.Mapping[str, AreaOutput]:
"""
Retrieves all areas of a study.
Expand Down Expand Up @@ -232,14 +241,18 @@ def get_all_area_props(self, study: RawStudy) -> t.Mapping[str, GetAreaDTO]:
area_map = {}
for area_id, area_cfg in areas_cfg.items():
area_folder = AreaFolder(**area_cfg)
area_map[area_id] = GetAreaDTO.create_area_dto(
area_map[area_id] = AreaOutput.create_area_dto(
area_folder,
average_unsupplied_energy_cost=thermal_areas.unserverd_energy_cost.get(area_id, 0.0),
average_spilled_energy_cost=thermal_areas.spilled_energy_cost.get(area_id, 0.0),
)

return area_map

@staticmethod
def get_table_schema() -> JSON:
return AreaOutput.schema()

def get_all_areas(self, study: RawStudy, area_type: t.Optional[AreaType] = None) -> t.List[AreaInfoDTO]:
"""
Retrieves all areas and districts of a raw study based on the area type.
Expand Down Expand Up @@ -496,32 +509,33 @@ def update_area_metadata(
)

def update_area_ui(self, study: Study, area_id: str, area_ui: AreaUI, layer: str = "0") -> None:
obj = area_ui.to_config()
file_study = self.storage_service.get_storage(study).get_raw(study)
commands = (
[
UpdateConfig(
target=f"input/areas/{area_id}/ui/ui/x",
data=area_ui.x,
data=obj["x"],
command_context=self.storage_service.variant_study_service.command_factory.command_context,
),
UpdateConfig(
target=f"input/areas/{area_id}/ui/ui/y",
data=area_ui.y,
data=obj["y"],
command_context=self.storage_service.variant_study_service.command_factory.command_context,
),
UpdateConfig(
target=f"input/areas/{area_id}/ui/ui/color_r",
data=area_ui.color_rgb[0],
data=obj["color_r"],
command_context=self.storage_service.variant_study_service.command_factory.command_context,
),
UpdateConfig(
target=f"input/areas/{area_id}/ui/ui/color_g",
data=area_ui.color_rgb[1],
data=obj["color_g"],
command_context=self.storage_service.variant_study_service.command_factory.command_context,
),
UpdateConfig(
target=f"input/areas/{area_id}/ui/ui/color_b",
data=area_ui.color_rgb[2],
data=obj["color_b"],
command_context=self.storage_service.variant_study_service.command_factory.command_context,
),
]
Expand All @@ -532,17 +546,17 @@ def update_area_ui(self, study: Study, area_id: str, area_ui: AreaUI, layer: str
[
UpdateConfig(
target=f"input/areas/{area_id}/ui/layerX/{layer}",
data=area_ui.x,
data=obj["x"],
command_context=self.storage_service.variant_study_service.command_factory.command_context,
),
UpdateConfig(
target=f"input/areas/{area_id}/ui/layerY/{layer}",
data=area_ui.y,
data=obj["y"],
command_context=self.storage_service.variant_study_service.command_factory.command_context,
),
UpdateConfig(
target=f"input/areas/{area_id}/ui/layerColor/{layer}",
data=f"{str(area_ui.color_rgb[0])} , {str(area_ui.color_rgb[1])} , {str(area_ui.color_rgb[2])}",
data=f"{obj['color_r']},{obj['color_g']},{obj['color_b']}",
command_context=self.storage_service.variant_study_service.command_factory.command_context,
),
]
Expand Down Expand Up @@ -593,11 +607,8 @@ def _update_with_cluster_metadata(
def _get_clusters(file_study: FileStudy, area: str, metadata_patch: Patch) -> t.List[ClusterInfoDTO]:
thermal_clusters_data = file_study.tree.get(["input", "thermal", "clusters", area, "list"])
cluster_patch = metadata_patch.thermal_clusters or {}
return [
AreaManager._update_with_cluster_metadata(
area,
ClusterInfoDTO.parse_obj({**thermal_clusters_data[tid], "id": tid}),
cluster_patch,
)
for tid in thermal_clusters_data
result = [
AreaManager._update_with_cluster_metadata(area, ClusterInfoDTO(id=tid, **obj), cluster_patch)
for tid, obj in thermal_clusters_data.items()
]
return result
68 changes: 50 additions & 18 deletions antarest/study/business/areas/renewable_management.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
import collections
import json
import typing as t

from pydantic import validator

from antarest.core.exceptions import DuplicateRenewableCluster, RenewableClusterConfigNotFound, RenewableClusterNotFound
from antarest.core.model import JSON
from antarest.study.business.enum_ignore_case import EnumIgnoreCase
from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands
from antarest.study.model import Study
Expand All @@ -21,14 +23,6 @@
from antarest.study.storage.variantstudy.model.command.replace_matrix import ReplaceMatrix
from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig

__all__ = (
"RenewableClusterInput",
"RenewableClusterCreation",
"RenewableClusterOutput",
"RenewableManager",
"TimeSeriesInterpretation",
)

_CLUSTER_PATH = "input/renewables/clusters/{area_id}/list/{cluster_id}"
_CLUSTERS_PATH = "input/renewables/clusters/{area_id}/list"
_ALL_CLUSTERS_PATH = "input/renewables/clusters"
Expand Down Expand Up @@ -148,15 +142,15 @@ def get_clusters(self, study: Study, area_id: str) -> t.Sequence[RenewableCluste
def get_all_renewables_props(
self,
study: Study,
) -> t.Mapping[str, t.Sequence[RenewableClusterOutput]]:
) -> t.Mapping[str, t.Mapping[str, RenewableClusterOutput]]:
"""
Retrieve all renewable clusters from all areas within a study.
Args:
study: Study from which to retrieve the clusters.
Returns:
A mapping of area IDs to lists of renewable clusters within the specified area.
A mapping of area IDs to a mapping of cluster IDs to cluster output.
Raises:
RenewableClusterConfigNotFound: If no clusters are found in the specified area.
Expand All @@ -173,14 +167,13 @@ def get_all_renewables_props(
raise RenewableClusterConfigNotFound(path)

study_version = study.version
all_clusters = {
area_id: [
create_renewable_output(study_version, cluster_id, cluster)
for cluster_id, cluster in cluster_obj.items()
]
for area_id, cluster_obj in clusters.items()
}
return all_clusters
renewables_by_areas: t.MutableMapping[str, t.MutableMapping[str, RenewableClusterOutput]]
renewables_by_areas = collections.defaultdict(dict)
for area_id, cluster_obj in clusters.items():
for cluster_id, cluster in cluster_obj.items():
renewables_by_areas[area_id][cluster_id] = create_renewable_output(study_version, cluster_id, cluster)

return renewables_by_areas

def create_cluster(
self, study: Study, area_id: str, cluster_data: RenewableClusterCreation
Expand Down Expand Up @@ -365,3 +358,42 @@ def duplicate_cluster(
execute_or_add_commands(study, self._get_file_study(study), commands, self.storage_service)

return RenewableClusterOutput(**new_config.dict(by_alias=False))

    def update_renewables_props(
        self,
        study: Study,
        update_renewables_by_areas: t.Mapping[str, t.Mapping[str, RenewableClusterInput]],
    ) -> t.Mapping[str, t.Mapping[str, RenewableClusterOutput]]:
        """
        Update the properties of several renewable clusters at once (table mode).

        Args:
            study: Study whose renewable clusters are updated.
            update_renewables_by_areas: Mapping of area IDs to a mapping of
                cluster IDs to the (partial) properties to apply.

        Returns:
            Mapping of area IDs to a mapping of cluster IDs to the resulting
            cluster objects; clusters not mentioned in the update are returned
            unchanged.
        """
        old_renewables_by_areas = self.get_all_renewables_props(study)
        # Shallow-copy each per-area dict so updates below don't mutate the snapshot
        # returned by `get_all_renewables_props`.
        new_renewables_by_areas = {area_id: dict(clusters) for area_id, clusters in old_renewables_by_areas.items()}

        # Prepare the commands to update the renewable clusters.
        commands = []
        for area_id, update_renewables_by_id in update_renewables_by_areas.items():
            # NOTE(review): raises KeyError for an unknown area or cluster ID —
            # confirm that callers validate IDs beforehand.
            old_renewables_by_id = old_renewables_by_areas[area_id]
            for renewable_id, update_cluster in update_renewables_by_id.items():
                # Update the renewable cluster properties.
                # `exclude_none=True` so that unset fields of the partial input
                # do not overwrite existing values.
                old_cluster = old_renewables_by_id[renewable_id]
                new_cluster = old_cluster.copy(update=update_cluster.dict(by_alias=False, exclude_none=True))
                new_renewables_by_areas[area_id][renewable_id] = new_cluster

                # Convert the DTO to a configuration object and update the configuration file.
                properties = create_renewable_config(
                    study.version, **new_cluster.dict(by_alias=False, exclude_none=True)
                )
                # `exclude={"id"}`: the cluster ID is the section key in the INI
                # file, not one of its properties.
                path = _CLUSTER_PATH.format(area_id=area_id, cluster_id=renewable_id)
                cmd = UpdateConfig(
                    target=path,
                    data=json.loads(properties.json(by_alias=True, exclude={"id"})),
                    command_context=self.storage_service.variant_study_service.command_factory.command_context,
                )
                commands.append(cmd)

        file_study = self.storage_service.get_storage(study).get_raw(study)
        execute_or_add_commands(study, file_study, commands, self.storage_service)

        return new_renewables_by_areas

@staticmethod
def get_table_schema() -> JSON:
return RenewableClusterOutput.schema()
57 changes: 50 additions & 7 deletions antarest/study/business/areas/st_storage_management.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import collections
import functools
import json
import operator
Expand All @@ -15,6 +16,7 @@
STStorageMatrixNotFound,
STStorageNotFound,
)
from antarest.core.model import JSON
from antarest.study.business.utils import AllOptionalMetaclass, camel_case_model, execute_or_add_commands
from antarest.study.model import Study
from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id
Expand Down Expand Up @@ -336,15 +338,15 @@ def get_storages(
def get_all_storages_props(
self,
study: Study,
) -> t.Mapping[str, t.Sequence[STStorageOutput]]:
) -> t.Mapping[str, t.Mapping[str, STStorageOutput]]:
"""
Retrieve all short-term storages from all areas within a study.
Args:
study: Study from which to retrieve the storages.
Returns:
A mapping of area IDs to lists of short-term storages within the specified area.
A mapping of area IDs to a mapping of storage IDs to storage configurations.
Raises:
STStorageConfigNotFound: If no storages are found in the specified area.
Expand All @@ -360,11 +362,48 @@ def get_all_storages_props(
except KeyError:
raise STStorageConfigNotFound(path) from None

all_storages = {
area_id: [STStorageOutput.from_config(cluster_id, cluster) for cluster_id, cluster in cluster_obj.items()]
for area_id, cluster_obj in storages.items()
}
return all_storages
storages_by_areas: t.MutableMapping[str, t.MutableMapping[str, STStorageOutput]]
storages_by_areas = collections.defaultdict(dict)
for area_id, cluster_obj in storages.items():
for cluster_id, cluster in cluster_obj.items():
storages_by_areas[area_id][cluster_id] = STStorageOutput.from_config(cluster_id, cluster)

return storages_by_areas

    def update_storages_props(
        self,
        study: Study,
        update_storages_by_areas: t.Mapping[str, t.Mapping[str, STStorageInput]],
    ) -> t.Mapping[str, t.Mapping[str, STStorageOutput]]:
        """
        Update the properties of several short-term storages at once (table mode).

        Args:
            study: Study whose short-term storages are updated.
            update_storages_by_areas: Mapping of area IDs to a mapping of
                storage IDs to the (partial) properties to apply.

        Returns:
            Mapping of area IDs to a mapping of storage IDs to the resulting
            storage objects; storages not mentioned in the update are returned
            unchanged.
        """
        old_storages_by_areas = self.get_all_storages_props(study)
        # Shallow-copy each per-area dict so updates below don't mutate the snapshot
        # returned by `get_all_storages_props`.
        new_storages_by_areas = {area_id: dict(clusters) for area_id, clusters in old_storages_by_areas.items()}

        # Prepare the commands to update the storage clusters.
        commands = []
        for area_id, update_storages_by_id in update_storages_by_areas.items():
            # NOTE(review): raises KeyError for an unknown area or storage ID —
            # confirm that callers validate IDs beforehand.
            old_storages_by_id = old_storages_by_areas[area_id]
            for storage_id, update_cluster in update_storages_by_id.items():
                # Update the storage cluster properties.
                # `exclude_none=True` so that unset fields of the partial input
                # do not overwrite existing values.
                old_cluster = old_storages_by_id[storage_id]
                new_cluster = old_cluster.copy(update=update_cluster.dict(by_alias=False, exclude_none=True))
                new_storages_by_areas[area_id][storage_id] = new_cluster

                # Convert the DTO to a configuration object and update the configuration file.
                properties = create_st_storage_config(
                    study.version, **new_cluster.dict(by_alias=False, exclude_none=True)
                )
                # `exclude={"id"}`: the storage ID is the section key in the INI
                # file, not one of its properties.
                path = _STORAGE_LIST_PATH.format(area_id=area_id, storage_id=storage_id)
                cmd = UpdateConfig(
                    target=path,
                    data=json.loads(properties.json(by_alias=True, exclude={"id"})),
                    command_context=self.storage_service.variant_study_service.command_factory.command_context,
                )
                commands.append(cmd)

        file_study = self.storage_service.get_storage(study).get_raw(study)
        execute_or_add_commands(study, file_study, commands, self.storage_service)

        return new_storages_by_areas

def get_storage(
self,
Expand Down Expand Up @@ -647,3 +686,7 @@ def validate_matrices(

# Validation successful
return True

@staticmethod
def get_table_schema() -> JSON:
return STStorageOutput.schema()
Loading

0 comments on commit 64a6fa0

Please sign in to comment.