Skip to content

Commit

Permalink
feat(bc): avoid unnecessary creation of RHS matrices for binding constraints (#2077)
Browse files Browse the repository at this point in the history


We now move or copy matrices to their right place when the BC operator
is changed.
  • Loading branch information
mabw-rte authored Jul 25, 2024
1 parent 2d58ab5 commit beaa4c1
Show file tree
Hide file tree
Showing 27 changed files with 1,166 additions and 287 deletions.
5 changes: 5 additions & 0 deletions antarest/core/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -603,3 +603,8 @@ def __init__(self) -> None:
HTTPStatus.BAD_REQUEST,
"You cannot scan the default internal workspace",
)


class ChildNotFoundError(HTTPException):
    """HTTP exception raised with status 404 NOT_FOUND and the given message."""

    def __init__(self, message: str) -> None:
        super().__init__(HTTPStatus.NOT_FOUND, message)
38 changes: 19 additions & 19 deletions antarest/study/business/areas/properties_management.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
import re
import typing as t
from builtins import sorted
from typing import Any, Dict, Iterable, List, Optional, Set, cast

from pydantic import root_validator

from antarest.core.exceptions import ChildNotFoundError
from antarest.study.business.utils import FieldInfo, FormFieldsBaseModel, execute_or_add_commands
from antarest.study.model import Study
from antarest.study.storage.rawstudy.model.filesystem.config.area import AdequacyPatchMode
from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig

Expand All @@ -21,35 +21,35 @@
DEFAULT_FILTER_VALUE = FILTER_OPTIONS


def sort_filter_options(options: Iterable[str]) -> List[str]:
def sort_filter_options(options: t.Iterable[str]) -> t.List[str]:
    """Return the given filter options sorted in the canonical FILTER_OPTIONS order.

    Raises ValueError (from ``list.index``) if an option is not in FILTER_OPTIONS.
    """
    return sorted(options, key=FILTER_OPTIONS.index)


def encode_filter(value: str) -> Set[str]:
def encode_filter(value: str) -> t.Set[str]:
    """Split a comma-separated filter string into a set of option tokens.

    Surrounding whitespace is stripped from the whole string and around each
    comma; an empty or whitespace-only input yields an empty set.
    """
    cleaned = value.strip()
    if not cleaned:
        return set()
    return set(re.split(r"\s*,\s*", cleaned))


def decode_filter(encoded_value: Set[str], current_filter: Optional[str] = None) -> str:
def decode_filter(encoded_value: t.Set[str], current_filter: t.Optional[str] = None) -> str:
    """Serialize a set of filter options to a comma-separated string in canonical order.

    ``current_filter`` is accepted for interface compatibility but is not used.
    """
    ordered = sort_filter_options(encoded_value)
    return ", ".join(ordered)


class PropertiesFormFields(FormFieldsBaseModel):
energy_cost_unsupplied: Optional[float]
energy_cost_spilled: Optional[float]
non_dispatch_power: Optional[bool]
dispatch_hydro_power: Optional[bool]
other_dispatch_power: Optional[bool]
filter_synthesis: Optional[Set[str]]
filter_by_year: Optional[Set[str]]
energy_cost_unsupplied: t.Optional[float]
energy_cost_spilled: t.Optional[float]
non_dispatch_power: t.Optional[bool]
dispatch_hydro_power: t.Optional[bool]
other_dispatch_power: t.Optional[bool]
filter_synthesis: t.Optional[t.Set[str]]
filter_by_year: t.Optional[t.Set[str]]
# version 830
adequacy_patch_mode: Optional[AdequacyPatchMode]
adequacy_patch_mode: t.Optional[AdequacyPatchMode]

@root_validator
def validation(cls, values: Dict[str, Any]) -> Dict[str, Any]:
def validation(cls, values: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
filters = {
"filter_synthesis": values.get("filter_synthesis"),
"filter_by_year": values.get("filter_by_year"),
Expand All @@ -63,7 +63,7 @@ def validation(cls, values: Dict[str, Any]) -> Dict[str, Any]:
return values


FIELDS_INFO: Dict[str, FieldInfo] = {
FIELDS_INFO: t.Dict[str, FieldInfo] = {
"energy_cost_unsupplied": {
"path": THERMAL_PATH.format(field="unserverdenergycost"),
"default_value": 0.0,
Expand Down Expand Up @@ -116,9 +116,9 @@ def get_field_values(
file_study = self.storage_service.get_storage(study).get_raw(study)
study_ver = file_study.config.version

def get_value(field_info: FieldInfo) -> Any:
start_ver = cast(int, field_info.get("start_version", 0))
end_ver = cast(int, field_info.get("end_version", study_ver))
def get_value(field_info: FieldInfo) -> t.Any:
start_ver = t.cast(int, field_info.get("start_version", 0))
end_ver = t.cast(int, field_info.get("end_version", study_ver))
is_in_version = start_ver <= study_ver <= end_ver
if not is_in_version:
return None
Expand All @@ -139,7 +139,7 @@ def set_field_values(
area_id: str,
field_values: PropertiesFormFields,
) -> None:
commands: List[UpdateConfig] = []
commands: t.List[UpdateConfig] = []
file_study = self.storage_service.get_storage(study).get_raw(study)
context = self.storage_service.variant_study_service.command_factory.command_context

Expand Down
2 changes: 1 addition & 1 deletion antarest/study/business/areas/st_storage_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@

from antarest.core.exceptions import (
AreaNotFound,
ChildNotFoundError,
DuplicateSTStorage,
STStorageConfigNotFound,
STStorageMatrixNotFound,
Expand All @@ -29,7 +30,6 @@
create_st_storage_config,
)
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.variantstudy.model.command.create_st_storage import CreateSTStorage
from antarest.study.storage.variantstudy.model.command.remove_st_storage import RemoveSTStorage
Expand Down
89 changes: 56 additions & 33 deletions antarest/study/business/binding_constraint_management.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import collections
import itertools
import json
import logging
import typing as t
Expand Down Expand Up @@ -46,11 +45,11 @@
from antarest.study.storage.variantstudy.model.command.create_binding_constraint import (
DEFAULT_GROUP,
EXPECTED_MATRIX_SHAPES,
TERM_MATRICES,
BindingConstraintMatrices,
BindingConstraintPropertiesBase,
CreateBindingConstraint,
OptionalProperties,
TermMatrices,
)
from antarest.study.storage.variantstudy.model.command.remove_binding_constraint import RemoveBindingConstraint
from antarest.study.storage.variantstudy.model.command.update_binding_constraint import UpdateBindingConstraint
Expand All @@ -59,6 +58,14 @@
logger = logging.getLogger(__name__)


# Maps each binding-constraint operator to the term-matrix names that must NOT
# be provided together with it (used by `check_attributes_coherence` to reject
# inconsistent inputs): e.g. an EQUAL constraint uses only the "equal" matrix,
# so supplying "less" or "greater" matrices conflicts with the operator.
OPERATOR_CONFLICT_MAP = {
    BindingConstraintOperator.EQUAL: [TermMatrices.LESS.value, TermMatrices.GREATER.value],
    BindingConstraintOperator.GREATER: [TermMatrices.LESS.value, TermMatrices.EQUAL.value],
    BindingConstraintOperator.LESS: [TermMatrices.EQUAL.value, TermMatrices.GREATER.value],
    BindingConstraintOperator.BOTH: [TermMatrices.EQUAL.value],
}


class LinkTerm(BaseModel):
"""
DTO for a constraint term on a link between two areas.
Expand Down Expand Up @@ -246,7 +253,7 @@ class ConstraintCreation(ConstraintInput):

@root_validator(pre=True)
def check_matrices_dimensions(cls, values: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
for _key in ["time_step"] + TERM_MATRICES:
for _key in ["time_step"] + [m.value for m in TermMatrices]:
_camel = to_camel_case(_key)
values[_key] = values.pop(_camel, values.get(_key))

Expand All @@ -264,7 +271,7 @@ def check_matrices_dimensions(cls, values: t.Dict[str, t.Any]) -> t.Dict[str, t.

# Collect the matrix shapes
matrix_shapes = {}
for _field_name in ["values"] + TERM_MATRICES:
for _field_name in ["values"] + [m.value for m in TermMatrices]:
if _matrix := values.get(_field_name):
_array = np.array(_matrix)
# We only store the shape if the array is not empty
Expand Down Expand Up @@ -330,32 +337,35 @@ def _get_references_by_widths(
The height of the matrices may vary depending on the time step,
but the width should be consistent within a group of binding constraints.
"""
if int(file_study.config.version) < 870:
matrix_id_fmts = {"{bc_id}"}
else:
matrix_id_fmts = {"{bc_id}_eq", "{bc_id}_lt", "{bc_id}_gt"}
operator_matrix_file_map = {
BindingConstraintOperator.EQUAL: ["{bc_id}_eq"],
BindingConstraintOperator.GREATER: ["{bc_id}_gt"],
BindingConstraintOperator.LESS: ["{bc_id}_lt"],
BindingConstraintOperator.BOTH: ["{bc_id}_lt", "{bc_id}_gt"],
}

references_by_width: t.Dict[int, t.List[t.Tuple[str, str]]] = {}
_total = len(bcs) * len(matrix_id_fmts)
for _index, (bc, fmt) in enumerate(itertools.product(bcs, matrix_id_fmts), 1):
bc_id = bc.id
matrix_id = fmt.format(bc_id=bc.id)
logger.info(f"⏲ Validating BC '{bc_id}': {matrix_id=} [{_index}/{_total}]")
obj = file_study.tree.get(url=["input", "bindingconstraints", matrix_id])
matrix = np.array(obj["data"], dtype=float)
# We ignore empty matrices as there are default matrices for the simulator.
if not matrix.size:
continue

matrix_height = matrix.shape[0]
expected_height = EXPECTED_MATRIX_SHAPES[bc.time_step][0]
if matrix_height != expected_height:
raise WrongMatrixHeightError(
f"The binding constraint '{bc.name}' should have {expected_height} rows, currently: {matrix_height}"
)
matrix_width = matrix.shape[1]
if matrix_width > 1:
references_by_width.setdefault(matrix_width, []).append((bc_id, matrix_id))
_total = len(bcs)
for _index, bc in enumerate(bcs):
matrices_name = operator_matrix_file_map[bc.operator] if file_study.config.version >= 870 else ["{bc_id}"]
for matrix_name in matrices_name:
matrix_id = matrix_name.format(bc_id=bc.id)
logger.info(f"⏲ Validating BC '{bc.id}': {matrix_id=} [{_index+1}/{_total}]")
obj = file_study.tree.get(url=["input", "bindingconstraints", matrix_id])
matrix = np.array(obj["data"], dtype=float)
# We ignore empty matrices as there are default matrices for the simulator.
if not matrix.size:
continue

matrix_height = matrix.shape[0]
expected_height = EXPECTED_MATRIX_SHAPES[bc.time_step][0]
if matrix_height != expected_height:
raise WrongMatrixHeightError(
f"The binding constraint '{bc.name}' should have {expected_height} rows, currently: {matrix_height}"
)
matrix_width = matrix.shape[1]
if matrix_width > 1:
references_by_width.setdefault(matrix_width, []).append((bc.id, matrix_id))

return references_by_width

Expand Down Expand Up @@ -684,7 +694,8 @@ def create_binding_constraint(
if bc_id in {bc.id for bc in self.get_binding_constraints(study)}:
raise DuplicateConstraintName(f"A binding constraint with the same name already exists: {bc_id}.")

check_attributes_coherence(data, version)
# TODO: the default operator should be fixed somewhere so this condition can be consistent
check_attributes_coherence(data, version, data.operator or BindingConstraintOperator.EQUAL)

new_constraint = {"name": data.name, **json.loads(data.json(exclude={"terms", "name"}, exclude_none=True))}
args = {
Expand Down Expand Up @@ -718,8 +729,9 @@ def update_binding_constraint(
) -> ConstraintOutput:
file_study = self.storage_service.get_storage(study).get_raw(study)
existing_constraint = self.get_binding_constraint(study, binding_constraint_id)

study_version = int(study.version)
check_attributes_coherence(data, study_version)
check_attributes_coherence(data, study_version, data.operator or existing_constraint.operator)

upd_constraint = {
"id": binding_constraint_id,
Expand All @@ -740,7 +752,7 @@ def update_binding_constraint(

# Validates the matrices. Needed when the study is a variant because we only append the command to the list
if isinstance(study, VariantStudy):
updated_matrices = [term for term in TERM_MATRICES if getattr(data, term)]
updated_matrices = [term for term in [m.value for m in TermMatrices] if getattr(data, term)]
time_step = data.time_step or existing_constraint.time_step
command.validates_and_fills_matrices(
time_step=time_step, specific_matrices=updated_matrices, version=study_version, create=False
Expand Down Expand Up @@ -912,13 +924,17 @@ def _replace_matrices_according_to_frequency_and_version(
BindingConstraintFrequency.DAILY.value: default_bc_weekly_daily_87,
BindingConstraintFrequency.WEEKLY.value: default_bc_weekly_daily_87,
}[data.time_step].tolist()
for term in TERM_MATRICES:
for term in [m.value for m in TermMatrices]:
if term not in args:
args[term] = matrix
return args


def check_attributes_coherence(data: t.Union[ConstraintCreation, ConstraintInput], study_version: int) -> None:
def check_attributes_coherence(
data: t.Union[ConstraintCreation, ConstraintInput],
study_version: int,
operator: BindingConstraintOperator,
) -> None:
if study_version < 870:
if data.group:
raise InvalidFieldForVersionError(
Expand All @@ -928,3 +944,10 @@ def check_attributes_coherence(data: t.Union[ConstraintCreation, ConstraintInput
raise InvalidFieldForVersionError("You cannot fill a 'matrix_term' as these values refer to v8.7+ studies")
elif data.values:
raise InvalidFieldForVersionError("You cannot fill 'values' as it refers to the matrix before v8.7")
conflicting_matrices = [
getattr(data, matrix) for matrix in OPERATOR_CONFLICT_MAP[operator] if getattr(data, matrix)
]
if conflicting_matrices:
raise InvalidFieldForVersionError(
f"You cannot fill matrices '{OPERATOR_CONFLICT_MAP[operator]}' while using the operator '{operator}'"
)
2 changes: 1 addition & 1 deletion antarest/study/business/table_mode_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import numpy as np
import pandas as pd

from antarest.core.exceptions import ChildNotFoundError
from antarest.core.model import JSON
from antarest.study.business.area_management import AreaManager, AreaOutput
from antarest.study.business.areas.renewable_management import RenewableClusterInput, RenewableManager
Expand All @@ -13,7 +14,6 @@
from antarest.study.business.enum_ignore_case import EnumIgnoreCase
from antarest.study.business.link_management import LinkManager, LinkOutput
from antarest.study.model import RawStudy
from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError

_TableIndex = str # row name
_TableColumn = str # column name
Expand Down
4 changes: 2 additions & 2 deletions antarest/study/business/xpansion_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,14 +9,14 @@
from fastapi import HTTPException, UploadFile
from pydantic import BaseModel, Extra, Field, ValidationError, root_validator, validator

from antarest.core.exceptions import BadZipBinary
from antarest.core.exceptions import BadZipBinary, ChildNotFoundError
from antarest.core.model import JSON
from antarest.study.business.all_optional_meta import AllOptionalMetaclass
from antarest.study.business.enum_ignore_case import EnumIgnoreCase
from antarest.study.model import Study
from antarest.study.storage.rawstudy.model.filesystem.bucket_node import BucketNode
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError, FolderNode
from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode
from antarest.study.storage.rawstudy.model.filesystem.root.user.expansion.expansion import Expansion
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.utils import fix_study_root
Expand Down
2 changes: 1 addition & 1 deletion antarest/study/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
from antarest.core.config import Config
from antarest.core.exceptions import (
BadEditInstructionException,
ChildNotFoundError,
CommandApplicationError,
IncorrectPathError,
NotAManagedStudyException,
Expand Down Expand Up @@ -106,7 +107,6 @@
)
from antarest.study.storage.matrix_profile import adjust_matrix_columns_index
from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfigDTO
from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError
from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode
from antarest.study.storage.rawstudy.model.filesystem.inode import INode
from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,8 @@
import shutil
import typing as t
from abc import ABC, abstractmethod
from http import HTTPStatus

from fastapi import HTTPException

from antarest.core.exceptions import ChildNotFoundError
from antarest.core.model import JSON, SUB_JSON
from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer
Expand All @@ -15,11 +13,6 @@ class FilterError(Exception):
pass


class ChildNotFoundError(HTTPException):
def __init__(self, message: str) -> None:
super().__init__(HTTPStatus.NOT_FOUND, message)


class FolderNode(INode[JSON, SUB_JSON, JSON], ABC):
# noinspection SpellCheckingInspection
"""
Expand Down
Loading

0 comments on commit beaa4c1

Please sign in to comment.