feat(bc): show existing matrices only (#2109)
Depending on the binding constraint (BC) operator, only the relevant matrix files ("lt", "gt", and/or "eq") are now expected and considered present.
MartinBelthle authored Aug 23, 2024
1 parent 6bfd4e4 commit cb1232d
Showing 17 changed files with 253 additions and 248 deletions.
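
In essence, the commit replaces the unconditional ["lt", "gt", "eq"] matrix listing with a per-operator lookup. A minimal, self-contained sketch of the principle (the map mirrors the OPERATOR_MATRICES_MAP introduced below; expected_matrix_ids is an illustrative helper, not part of the codebase):

    from enum import Enum


    class BindingConstraintOperator(Enum):
        LESS = "less"
        GREATER = "greater"
        BOTH = "both"
        EQUAL = "equal"


    # An operator only uses the bound matrices its comparison needs:
    # "lt" (less than), "gt" (greater than), "eq" (equal).
    OPERATOR_MATRICES_MAP = {
        BindingConstraintOperator.EQUAL: ["eq"],
        BindingConstraintOperator.GREATER: ["gt"],
        BindingConstraintOperator.LESS: ["lt"],
        BindingConstraintOperator.BOTH: ["lt", "gt"],
    }


    def expected_matrix_ids(bc_id, operator):
        # e.g. ("bc_1", BOTH) -> ["bc_1_lt", "bc_1_gt"]
        return [f"{bc_id}_{suffix}" for suffix in OPERATOR_MATRICES_MAP[operator]]


    assert expected_matrix_ids("bc_1", BindingConstraintOperator.BOTH) == ["bc_1_lt", "bc_1_gt"]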
13 changes: 7 additions & 6 deletions antarest/study/business/binding_constraint_management.py
@@ -24,6 +24,9 @@
 from antarest.study.business.utils import execute_or_add_commands
 from antarest.study.model import Study
 from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
+    DEFAULT_GROUP,
+    DEFAULT_OPERATOR,
+    DEFAULT_TIMESTEP,
     BindingConstraintFrequency,
     BindingConstraintOperator,
 )
@@ -43,7 +46,6 @@
     default_bc_weekly_daily as default_bc_weekly_daily_86,
 )
 from antarest.study.storage.variantstudy.model.command.create_binding_constraint import (
-    DEFAULT_GROUP,
     EXPECTED_MATRIX_SHAPES,
     BindingConstraintMatrices,
     BindingConstraintPropertiesBase,
@@ -470,8 +472,8 @@ def constraint_model_adapter(constraint: t.Mapping[str, t.Any], version: int) ->
             "id": constraint["id"],
             "name": constraint["name"],
             "enabled": constraint.get("enabled", True),
-            "time_step": constraint.get("type", BindingConstraintFrequency.HOURLY),
-            "operator": constraint.get("operator", BindingConstraintOperator.EQUAL),
+            "time_step": constraint.get("type", DEFAULT_TIMESTEP),
+            "operator": constraint.get("operator", DEFAULT_OPERATOR),
            "comments": constraint.get("comments", ""),
             "terms": constraint.get("terms", []),
         }
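
Both fallback styles used in this file, sketched with plain strings standing in for the enum members (section contents are made up):

    DEFAULT_OPERATOR = "equal"
    DEFAULT_TIMESTEP = "hourly"

    section = {"id": "bc_1", "name": "bc_1"}           # no "type"/"operator" keys
    time_step = section.get("type", DEFAULT_TIMESTEP)  # dict fallback
    operator = None or DEFAULT_OPERATOR                # "value or DEFAULT" fallback
    assert (time_step, operator) == ("hourly", "equal")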
@@ -694,8 +696,7 @@ def create_binding_constraint(
        if bc_id in {bc.id for bc in self.get_binding_constraints(study)}:
            raise DuplicateConstraintName(f"A binding constraint with the same name already exists: {bc_id}.")

-        # TODO: the default operator should be fixed somewhere so this condition can be consistent
-        check_attributes_coherence(data, version, data.operator or BindingConstraintOperator.EQUAL)
+        check_attributes_coherence(data, version, data.operator or DEFAULT_OPERATOR)

        new_constraint = {"name": data.name, **json.loads(data.json(exclude={"terms", "name"}, exclude_none=True))}
        args = {
@@ -709,7 +710,7 @@

        # Validates the matrices. Needed when the study is a variant because we only append the command to the list
        if isinstance(study, VariantStudy):
-            time_step = data.time_step or BindingConstraintFrequency.HOURLY
+            time_step = data.time_step or DEFAULT_TIMESTEP
            command.validates_and_fills_matrices(
                time_step=time_step, specific_matrices=None, version=version, create=True
            )
@@ -2,6 +2,8 @@
 Object model used to read and update binding constraint configuration.
 """

+import typing as t
+
 from antarest.study.business.enum_ignore_case import EnumIgnoreCase


@@ -35,3 +37,17 @@ class BindingConstraintOperator(EnumIgnoreCase):
     GREATER = "greater"
     BOTH = "both"
     EQUAL = "equal"
+
+
+OPERATOR_MATRICES_MAP: t.Dict[BindingConstraintOperator, t.List[str]] = {
+    BindingConstraintOperator.EQUAL: ["eq"],
+    BindingConstraintOperator.GREATER: ["gt"],
+    BindingConstraintOperator.LESS: ["lt"],
+    BindingConstraintOperator.BOTH: ["lt", "gt"],
+}
+
+
+DEFAULT_GROUP = "default"
+"""Default group for binding constraints (since v8.7)."""
+DEFAULT_OPERATOR = BindingConstraintOperator.EQUAL
+DEFAULT_TIMESTEP = BindingConstraintFrequency.HOURLY
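
A quick usage sketch for the constants added above (assumes the definitions in this hunk are in scope):

    assert OPERATOR_MATRICES_MAP[BindingConstraintOperator.BOTH] == ["lt", "gt"]
    assert OPERATOR_MATRICES_MAP[DEFAULT_OPERATOR] == ["eq"]
    assert DEFAULT_TIMESTEP == BindingConstraintFrequency.HOURLY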
26 changes: 14 additions & 12 deletions antarest/study/storage/rawstudy/model/filesystem/config/files.py
@@ -10,14 +10,18 @@

 from antarest.core.model import JSON
 from antarest.study.storage.rawstudy.ini_reader import IniReader
-from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency
+from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
+    DEFAULT_GROUP,
+    DEFAULT_OPERATOR,
+    DEFAULT_TIMESTEP,
+    BindingConstraintFrequency,
+)
 from antarest.study.storage.rawstudy.model.filesystem.config.exceptions import (
     SimulationParsingError,
     XpansionParsingError,
 )
 from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import extract_filtering
 from antarest.study.storage.rawstudy.model.filesystem.config.model import (
-    DEFAULT_GROUP,
     Area,
     BindingConstraintDTO,
     DistrictSet,
@@ -212,25 +216,23 @@ def _parse_bindings(root: Path) -> t.List[BindingConstraintDTO]:
         # contains a set of strings in the following format: "area.cluster"
         cluster_set = set()
         # Default value for time_step
-        time_step = BindingConstraintFrequency.HOURLY
+        time_step = bind.get("type", DEFAULT_TIMESTEP)
+        # Default value for operator
+        operator = bind.get("operator", DEFAULT_OPERATOR)
+        # Default value for group
+        group = bind.get("group", DEFAULT_GROUP)
+        # Build areas and clusters based on terms
         for key in bind:
-            if key == "type":
-                time_step = BindingConstraintFrequency(bind[key])
-            elif "%" in key:
+            if "%" in key:
                 areas = key.split("%", 1)
                 area_set.add(areas[0])
                 area_set.add(areas[1])
             elif "." in key:
                 cluster_set.add(key)
                 area_set.add(key.split(".", 1)[0])

-        group = bind.get("group", DEFAULT_GROUP)
         bc = BindingConstraintDTO(
-            id=bind["id"],
-            areas=area_set,
-            clusters=cluster_set,
-            time_step=time_step,
-            group=group,
+            id=bind["id"], areas=area_set, clusters=cluster_set, time_step=time_step, operator=operator, group=group
         )
         output_list.append(bc)

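For reference, _parse_bindings classifies INI keys by shape: "area1%area2" is a link term, "area.cluster" a cluster term, everything else a property. A self-contained sketch of the same classification (the sample section is made up):

    bind = {
        "id": "bc_1",
        "type": "daily",
        "operator": "both",
        "area_a%area_b": "1",       # link term: "area1%area2"
        "area_c.cluster_1": "2.5",  # cluster term: "area.cluster"
    }

    area_set, cluster_set = set(), set()
    for key in bind:
        if "%" in key:
            a, b = key.split("%", 1)
            area_set |= {a, b}
        elif "." in key:
            cluster_set.add(key)
            area_set.add(key.split(".", 1)[0])

    assert area_set == {"area_a", "area_b", "area_c"}
    assert cluster_set == {"area_c.cluster_1"}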
18 changes: 12 additions & 6 deletions antarest/study/storage/rawstudy/model/filesystem/config/model.py
@@ -7,15 +7,18 @@
 from antarest.core.utils.utils import DTO
 from antarest.study.business.enum_ignore_case import EnumIgnoreCase

-from .binding_constraint import BindingConstraintFrequency
+from .binding_constraint import (
+    DEFAULT_GROUP,
+    DEFAULT_OPERATOR,
+    DEFAULT_TIMESTEP,
+    BindingConstraintFrequency,
+    BindingConstraintOperator,
+)
 from .field_validators import extract_filtering
 from .renewable import RenewableConfigType
 from .st_storage import STStorageConfigType
 from .thermal import ThermalConfigType

-DEFAULT_GROUP = "default"
-"""Default group for binding constraints (since v8.7)."""
-

 class EnrModelling(EnumIgnoreCase):
     """
@@ -121,15 +124,18 @@ class BindingConstraintDTO(BaseModel):
     Attributes:
         id: The ID of the binding constraint.
-        group: The group for the scenario of BC (optional, required since v8.7).
         areas: List of area IDs on which the BC applies (links or clusters).
         clusters: List of thermal cluster IDs on which the BC applies (format: "area.cluster").
         time_step: The time_step of the BC
+        operator: The operator of the BC
+        group: The group for the scenario of BC (optional, required since v8.7).
     """

     id: str
     areas: t.Set[str]
     clusters: t.Set[str]
-    time_step: BindingConstraintFrequency
+    time_step: BindingConstraintFrequency = DEFAULT_TIMESTEP
+    operator: BindingConstraintOperator = DEFAULT_OPERATOR
+    # since v8.7
     group: str = DEFAULT_GROUP

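A usage sketch for the reworked DTO (assumes the model and enums above are importable; values are made up):

    dto = BindingConstraintDTO(id="bc_1", areas={"area_a", "area_b"}, clusters=set())
    assert dto.time_step == BindingConstraintFrequency.HOURLY  # DEFAULT_TIMESTEP
    assert dto.operator == BindingConstraintOperator.EQUAL     # DEFAULT_OPERATOR
    assert dto.group == "default"                              # DEFAULT_GROUP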
28 changes: 0 additions & 28 deletions antarest/study/storage/rawstudy/model/filesystem/lazy_node.py
@@ -1,12 +1,10 @@
-import shutil
 import typing as t
 from abc import ABC, abstractmethod
 from dataclasses import dataclass
 from datetime import datetime, timedelta
 from pathlib import Path
 from zipfile import ZipFile

-from antarest.core.exceptions import ChildNotFoundError
 from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
 from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer
 from antarest.study.storage.rawstudy.model.filesystem.inode import G, INode, S, V
@@ -109,22 +107,6 @@ def delete(self, url: t.Optional[t.List[str]] = None) -> None:
         elif self.config.path.exists():
             self.config.path.unlink()

-    def _infer_path(self) -> Path:
-        if self.get_link_path().exists():
-            return self.get_link_path()
-        elif self.config.path.exists():
-            return self.config.path
-        else:
-            raise ChildNotFoundError(
-                f"Neither link file {self.get_link_path} nor matrix file {self.config.path} exists"
-            )
-
-    def _infer_target_path(self, is_link: bool) -> Path:
-        if is_link:
-            return self.get_link_path()
-        else:
-            return self.config.path
-
     def get_link_path(self) -> Path:
         path = self.config.path.parent / (self.config.path.name + ".link")
         return path
@@ -144,16 +126,6 @@ def save(self, data: t.Union[str, bytes, S], url: t.Optional[t.List[str]] = None
             self.get_link_path().unlink()
         return None

-    def rename_file(self, target: "LazyNode[t.Any, t.Any, t.Any]") -> None:
-        target_path = target._infer_target_path(self.get_link_path().exists())
-        target_path.unlink(missing_ok=True)
-        self._infer_path().rename(target_path)
-
-    def copy_file(self, target: "LazyNode[t.Any, t.Any, t.Any]") -> None:
-        target_path = target._infer_target_path(self.get_link_path().exists())
-        target_path.unlink(missing_ok=True)
-        shutil.copy(self._infer_path(), target_path)
-
     def get_lazy_content(
         self,
         url: t.Optional[t.List[str]] = None,
@@ -1,4 +1,5 @@
 import logging
+import shutil
 from pathlib import Path
 from typing import Any, List, Optional, Union, cast

@@ -73,11 +74,11 @@ def parse(
             raise ChildNotFoundError(f"File '{relpath}' not found in the study '{study_id}'") from e

         stopwatch.log_elapsed(lambda x: logger.info(f"Matrix parsed in {x}s"))
-        matrix.dropna(how="any", axis=1, inplace=True)
+        final_matrix = matrix.dropna(how="any", axis=1)
         if return_dataframe:
-            return matrix
+            return final_matrix

-        data = cast(JSON, matrix.to_dict(orient="split"))
+        data = cast(JSON, final_matrix.to_dict(orient="split"))
         stopwatch.log_elapsed(lambda x: logger.info(f"Matrix to dict in {x}s"))

         return data
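
For context, dropna without inplace=True returns a filtered copy and leaves the parsed frame intact, so the same filtered frame can serve both return paths; a minimal illustration:

    import pandas as pd

    df = pd.DataFrame({"a": [1.0, 2.0], "b": [float("nan"), 3.0]})
    filtered = df.dropna(how="any", axis=1)  # drops column "b" in the copy only
    assert list(df.columns) == ["a", "b"]    # source frame unchanged
    assert list(filtered.columns) == ["a"]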
@@ -100,3 +101,20 @@ def check_errors(
         if self.nb_columns and len(data) != self.nb_columns:
             errors.append(f"{self.config.path}: Data was wrong size. expected {self.nb_columns} get {len(data)}")
         return errors
+
+    def _infer_path(self) -> Path:
+        if self.get_link_path().exists():
+            return self.get_link_path()
+        elif self.config.path.exists():
+            return self.config.path
+        raise ChildNotFoundError(f"Neither link file {self.get_link_path()} nor matrix file {self.config.path} exists")
+
+    def rename_file(self, target: str) -> None:
+        target_path = self.config.path.parent.joinpath(f"{target}{''.join(self._infer_path().suffixes)}")
+        target_path.unlink(missing_ok=True)
+        self._infer_path().rename(target_path)
+
+    def copy_file(self, target: str) -> None:
+        target_path = self.config.path.parent.joinpath(f"{target}{''.join(self._infer_path().suffixes)}")
+        target_path.unlink(missing_ok=True)
+        shutil.copy(self._infer_path(), target_path)
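
The new rename_file/copy_file take a plain target name and preserve the source file's suffixes (e.g. .txt or .txt.link); a self-contained sketch of that path arithmetic (paths are made up):

    from pathlib import Path

    source = Path("input/bindingconstraints/bc_1_lt.txt.link")
    target = "bc_1_gt"
    # ''.join(Path(...).suffixes) keeps every extension, not just the last one
    target_path = source.parent.joinpath(f"{target}{''.join(source.suffixes)}")
    assert target_path == Path("input/bindingconstraints/bc_1_gt.txt.link")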
@@ -1,4 +1,7 @@
-from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency
+from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
+    OPERATOR_MATRICES_MAP,
+    BindingConstraintFrequency,
+)
 from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode
 from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
 from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix
@@ -52,7 +55,8 @@ def build(self) -> TREE:
         }
         children = {}
         for binding in self.config.bindings:
-            for term in ["lt", "gt", "eq"]:
+            terms = OPERATOR_MATRICES_MAP[binding.operator]
+            for term in terms:
                 matrix_id = f"{binding.id}_{term}"
                 children[matrix_id] = InputSeriesMatrix(
                     self.context,
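With this change the tree only exposes matrices that match each constraint's operator; a sketch of the resulting children keys (the constraint objects and node values are stand-ins):

    # Stand-in for self.config.bindings: (id, operator) pairs.
    bindings = [("bc_eq", "equal"), ("bc_both", "both")]
    operator_map = {"equal": ["eq"], "greater": ["gt"], "less": ["lt"], "both": ["lt", "gt"]}

    children = {}
    for bc_id, op in bindings:
        for term in operator_map[op]:
            children[f"{bc_id}_{term}"] = object()  # InputSeriesMatrix(...) in the real tree

    assert sorted(children) == ["bc_both_gt", "bc_both_lt", "bc_eq_eq"]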
@@ -1,23 +1,25 @@
 import typing as t

-from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency
+from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
+    DEFAULT_TIMESTEP,
+    BindingConstraintFrequency,
+    BindingConstraintOperator,
+)
 from antarest.study.storage.rawstudy.model.filesystem.config.model import BindingConstraintDTO, FileStudyTreeConfig


 def parse_bindings_coeffs_and_save_into_config(
     bd_id: str,
     study_data_config: FileStudyTreeConfig,
     coeffs: t.Mapping[str, t.Union[t.Literal["hourly", "daily", "weekly"], t.Sequence[float]]],
+    operator: BindingConstraintOperator,
+    time_step: BindingConstraintFrequency,
     group: str,
 ) -> None:
     if bd_id not in [bind.id for bind in study_data_config.bindings]:
         areas_set = set()
         clusters_set = set()
-        # Default time_step value
-        time_step = BindingConstraintFrequency.HOURLY
         for k, v in coeffs.items():
-            if k == "type":
-                time_step = BindingConstraintFrequency(v)
             if "%" in k:
                 areas_set |= set(k.split("%"))
             elif "." in k:
@@ -28,6 +30,7 @@ def parse_bindings_coeffs_and_save_into_config(
             group=group,
             areas=areas_set,
             clusters=clusters_set,
+            operator=operator,
             time_step=time_step,
         )
         study_data_config.bindings.append(bc)
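A hedged call sketch for the updated signature (config stands in for an existing FileStudyTreeConfig; values are made up):

    parse_bindings_coeffs_and_save_into_config(
        bd_id="bc_1",
        study_data_config=config,
        coeffs={"area_a%area_b": [1.0]},
        operator=BindingConstraintOperator.BOTH,     # now passed explicitly
        time_step=BindingConstraintFrequency.DAILY,  # instead of parsed from coeffs["type"]
        group="default",
    )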