Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(api): do not allow areas, links or thermals deletion when referenced in a binding constraint #2061

Merged
29 changes: 29 additions & 0 deletions antarest/core/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -325,6 +325,35 @@ def __init__(self, is_variant: bool) -> None:
super().__init__(HTTPStatus.EXPECTATION_FAILED, "Upgrade not supported for parent of variants")


class ReferencedObjectDeletionNotAllowed(HTTPException):
    """
    Raised (HTTP 403 Forbidden) when an object — an area, a link or a thermal
    cluster — cannot be deleted because one or more binding constraints still
    reference it.
    """

    def __init__(self, object_id: str, binding_ids: t.Sequence[str], *, object_type: str) -> None:
        """
        Build the exception message from the referencing constraints.

        Args:
            object_id: ID of the object whose deletion is forbidden.
            binding_ids: IDs of the binding constraints that reference the object.
            object_type: Kind of object: area, link or thermal cluster.
        """
        # Only list the first few constraint IDs to keep the message readable.
        limit = 10
        listing = ",\n".join(f"{rank}- '{bc_id}'" for rank, bc_id in enumerate(binding_ids[:limit], 1))
        hidden_count = len(binding_ids) - limit
        tail = f",\nand {hidden_count} more..." if hidden_count > 0 else "."
        detail = (
            f"{object_type} '{object_id}' is not allowed to be deleted, because it is referenced"
            f" in the following binding constraints:\n{listing}{tail}"
        )
        super().__init__(HTTPStatus.FORBIDDEN, detail)

    def __str__(self) -> str:
        """Use the HTTP error detail as the string representation."""
        return self.detail


class UnsupportedStudyVersion(HTTPException):
def __init__(self, version: str) -> None:
super().__init__(
Expand Down
13 changes: 6 additions & 7 deletions antarest/study/business/binding_constraint_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -501,13 +501,12 @@ def terms_to_coeffs(terms: t.Sequence[ConstraintTerm]) -> t.Dict[str, t.List[flo
:return: A dictionary of term IDs mapped to a list of their coefficients.
"""
coeffs = {}
if terms is not None:
for term in terms:
if term.id and term.weight is not None:
coeffs[term.id] = [term.weight]
if term.offset:
coeffs[term.id].append(term.offset)
return coeffs
for term in terms:
if term.id and term.weight is not None:
coeffs[term.id] = [term.weight]
if term.offset:
coeffs[term.id].append(term.offset)
return coeffs

def get_binding_constraint(self, study: Study, bc_id: str) -> ConstraintOutput:
"""
Expand Down
38 changes: 31 additions & 7 deletions antarest/study/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -426,13 +426,37 @@ class ExportFormat(str, enum.Enum):
TAR_GZ = "application/tar+gz"
JSON = "application/json"

@staticmethod
def from_dto(data: str) -> "ExportFormat":
if data == "application/zip":
return ExportFormat.ZIP
if data == "application/tar+gz":
return ExportFormat.TAR_GZ
return ExportFormat.JSON
@classmethod
def from_dto(cls, accept_header: str) -> "ExportFormat":
    """
    Convert the "Accept" header to the corresponding content type.

    Args:
        accept_header: Value of the "Accept" header.

    Returns:
        The corresponding content type: ZIP, TAR_GZ or JSON.
        By default, JSON is returned if the format is not recognized.
        For instance, if the "Accept" header is "*/*", JSON is returned.
    """
    try:
        # Enum lookup by value: each member's value is its MIME type.
        return cls(accept_header)
    except ValueError:
        # Unknown or wildcard media types (e.g. "*/*") fall back to JSON.
        return cls.JSON

@property
def suffix(self) -> str:
    """
    Returns the file suffix associated with the format: ".zip", ".tar.gz" or ".json".
    """
    if self is ExportFormat.ZIP:
        return ".zip"
    if self is ExportFormat.TAR_GZ:
        return ".tar.gz"
    # The enum has exactly three members, so the remaining case is JSON.
    return ".json"


class StudyDownloadDTO(BaseModel):
Expand Down
145 changes: 100 additions & 45 deletions antarest/study/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
CommandApplicationError,
IncorrectPathError,
NotAManagedStudyException,
ReferencedObjectDeletionNotAllowed,
StudyDeletionNotAllowed,
StudyNotFoundError,
StudyTypeUnsupported,
Expand Down Expand Up @@ -56,7 +57,7 @@
from antarest.study.business.areas.renewable_management import RenewableManager
from antarest.study.business.areas.st_storage_management import STStorageManager
from antarest.study.business.areas.thermal_management import ThermalManager
from antarest.study.business.binding_constraint_management import BindingConstraintManager
from antarest.study.business.binding_constraint_management import BindingConstraintManager, ConstraintFilters, LinkTerm
from antarest.study.business.config_management import ConfigManager
from antarest.study.business.correlation_management import CorrelationManager
from antarest.study.business.district_manager import DistrictManager
Expand Down Expand Up @@ -686,7 +687,7 @@
id=sid,
name=study_name,
workspace=DEFAULT_WORKSPACE_NAME,
path=study_path,
path=str(study_path),
created_at=datetime.utcnow(),
updated_at=datetime.utcnow(),
version=version or NEW_DEFAULT_STUDY_VERSION,
Expand Down Expand Up @@ -1187,13 +1188,13 @@
"""
Download outputs
Args:
study_id: study Id
output_id: output id
data: Json parameters
use_task: use task or not
filetype: type of returning file,
tmp_export_file: temporary file (if use_task is false),
params: request parameters
study_id: study ID.
output_id: output ID.
data: Json parameters.
use_task: use task or not.
filetype: type of the returned file.
tmp_export_file: temporary file (if `use_task` is false).
params: request parameters.

Returns: CSV content file

Expand All @@ -1202,35 +1203,33 @@
study = self.get_study(study_id)
assert_permission(params.user, study, StudyPermissionType.READ)
self._assert_study_unarchived(study)
logger.info(
f"Study {study_id} output download asked by {params.get_user_id()}",
)
logger.info(f"Study {study_id} output download asked by {params.get_user_id()}")

if use_task:
logger.info(f"Exporting {output_id} from study {study_id}")
export_name = f"Study filtered output {study.name}/{output_id} export"
export_file_download = self.file_transfer_manager.request_download(
f"{study.name}-{study_id}-{output_id}_filtered.{'tar.gz' if filetype == ExportFormat.TAR_GZ else 'zip'}",
f"{study.name}-{study_id}-{output_id}_filtered{filetype.suffix}",
export_name,
params.user,
)
export_path = Path(export_file_download.path)
export_id = export_file_download.id

def export_task(notifier: TaskUpdateNotifier) -> TaskResult:
def export_task(_notifier: TaskUpdateNotifier) -> TaskResult:
try:
study = self.get_study(study_id)
stopwatch = StopWatch()
matrix = StudyDownloader.build(
self.storage_service.get_storage(study).get_raw(study),
_study = self.get_study(study_id)
_stopwatch = StopWatch()
_matrix = StudyDownloader.build(
self.storage_service.get_storage(_study).get_raw(_study),
output_id,
data,
)
stopwatch.log_elapsed(
_stopwatch.log_elapsed(
lambda x: logger.info(f"Study {study_id} filtered output {output_id} built in {x}s")
)
StudyDownloader.export(matrix, filetype, export_path)
stopwatch.log_elapsed(
StudyDownloader.export(_matrix, filetype, export_path)
_stopwatch.log_elapsed(
lambda x: logger.info(f"Study {study_id} filtered output {output_id} exported in {x}s")
)
self.file_transfer_manager.set_ready(export_id)
Expand All @@ -1240,7 +1239,7 @@
)
except Exception as e:
self.file_transfer_manager.fail(export_id, str(e))
raise e
raise

task_id = self.task_service.add_task(
export_task,
Expand All @@ -1265,17 +1264,18 @@
stopwatch.log_elapsed(
lambda x: logger.info(f"Study {study_id} filtered output {output_id} exported in {x}s")
)
return FileResponse(
tmp_export_file,
headers=(
{"Content-Disposition": "inline"}
if filetype == ExportFormat.JSON
else {
"Content-Disposition": f'attachment; filename="output-{output_id}.{"tar.gz" if filetype == ExportFormat.TAR_GZ else "zip"}'
}
),
media_type=filetype,
)

if filetype == ExportFormat.JSON:
headers = {"Content-Disposition": "inline"}
elif filetype == ExportFormat.TAR_GZ:
headers = {"Content-Disposition": f'attachment; filename="output-{output_id}.tar.gz'}
elif filetype == ExportFormat.ZIP:
headers = {"Content-Disposition": f'attachment; filename="output-{output_id}.zip'}
else: # pragma: no cover
raise NotImplementedError(f"Export format {filetype} is not supported")

return FileResponse(tmp_export_file, headers=headers, media_type=filetype)
Fixed Show fixed Hide fixed
Dismissed Show dismissed Hide dismissed

else:
json_response = json.dumps(
matrix.dict(),
Expand Down Expand Up @@ -1314,26 +1314,20 @@
params: RequestParameters,
) -> None:
"""
Set simulation as the reference output
Set simulation as the reference output.

Args:
study_id: study Id
output_id: output id
status: state of the reference status
study_id: study ID.
output_id: The ID of the output to set as reference.
status: state of the reference status.
params: request parameters

Returns: None

"""
study = self.get_study(study_id)
assert_permission(params.user, study, StudyPermissionType.WRITE)
self._assert_study_unarchived(study)

logger.info(
"output %s set by user %s as reference (%b) for study %s",
output_id,
params.get_user_id(),
status,
study_id,
f"output {output_id} set by user {params.get_user_id()} as reference ({status}) for study {study_id}"
)

self.storage_service.get_storage(study).set_reference_output(study, output_id, status)
Expand Down Expand Up @@ -1855,9 +1849,27 @@
return self.areas.update_thermal_cluster_metadata(study, area_id, clusters_metadata)

def delete_area(self, uuid: str, area_id: str, params: RequestParameters) -> None:
"""
Delete area from study if it is not referenced by a binding constraint,
otherwise raise an HTTP 403 Forbidden error.

Args:
uuid: The study ID.
area_id: The area ID to delete.
params: The request parameters used to check user permissions.

Raises:
ReferencedObjectDeletionNotAllowed: If the area is referenced by a binding constraint.
"""
study = self.get_study(uuid)
assert_permission(params.user, study, StudyPermissionType.WRITE)
self._assert_study_unarchived(study)
referencing_binding_constraints = self.binding_constraint_manager.get_binding_constraints(
study, ConstraintFilters(area_name=area_id)
)
if referencing_binding_constraints:
binding_ids = [bc.id for bc in referencing_binding_constraints]
raise ReferencedObjectDeletionNotAllowed(area_id, binding_ids, object_type="Area")
self.areas.delete_area(study, area_id)
self.event_bus.push(
Event(
Expand All @@ -1874,9 +1886,29 @@
area_to: str,
params: RequestParameters,
) -> None:
"""
Delete link from study if it is not referenced by a binding constraint,
otherwise raise an HTTP 403 Forbidden error.

Args:
uuid: The study ID.
area_from: The area from which the link starts.
area_to: The area to which the link ends.
params: The request parameters used to check user permissions.

Raises:
ReferencedObjectDeletionNotAllowed: If the link is referenced by a binding constraint.
"""
study = self.get_study(uuid)
assert_permission(params.user, study, StudyPermissionType.WRITE)
self._assert_study_unarchived(study)
link_id = LinkTerm(area1=area_from, area2=area_to).generate_id()
referencing_binding_constraints = self.binding_constraint_manager.get_binding_constraints(
study, ConstraintFilters(link_id=link_id)
)
if referencing_binding_constraints:
binding_ids = [bc.id for bc in referencing_binding_constraints]
raise ReferencedObjectDeletionNotAllowed(link_id, binding_ids, object_type="Link")
self.links.delete_link(study, area_from, area_to)
self.event_bus.push(
Event(
Expand Down Expand Up @@ -2518,3 +2550,26 @@
)

return df_matrix

def asserts_no_thermal_in_binding_constraints(
    self, study: Study, area_id: str, cluster_ids: t.Sequence[str]
) -> None:
    """
    Check that no cluster is referenced in a binding constraint, otherwise raise an HTTP 403 Forbidden error.

    Args:
        study: input study for which an update is to be committed
        area_id: area ID to be checked
        cluster_ids: IDs of the thermal clusters to be checked

    Raises:
        ReferencedObjectDeletionNotAllowed: if a cluster is referenced in a binding constraint
    """
    for cluster_id in cluster_ids:
        # Thermal clusters are identified as "<area_id>.<cluster_id>" in constraint terms.
        filters = ConstraintFilters(cluster_id=f"{area_id}.{cluster_id}")
        referencing = self.binding_constraint_manager.get_binding_constraints(study, filters)
        if referencing:
            raise ReferencedObjectDeletionNotAllowed(
                cluster_id, [bc.id for bc in referencing], object_type="Cluster"
            )
1 change: 1 addition & 0 deletions antarest/study/web/study_data_blueprint.py
Original file line number Diff line number Diff line change
Expand Up @@ -2185,6 +2185,7 @@ def delete_thermal_clusters(
)
request_params = RequestParameters(user=current_user)
study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, request_params)
study_service.asserts_no_thermal_in_binding_constraints(study, area_id, cluster_ids)
study_service.thermal_manager.delete_clusters(study, area_id, cluster_ids)

@bp.get(
Expand Down
25 changes: 25 additions & 0 deletions tests/core/test_exceptions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
from antarest.core.exceptions import ReferencedObjectDeletionNotAllowed


class TestReferencedObjectDeletionNotAllowed:
    """Checks the user-facing message built by ReferencedObjectDeletionNotAllowed."""

    def test_few_binding_constraints(self) -> None:
        # With few constraints, every ID is listed and no truncation marker appears.
        exception = ReferencedObjectDeletionNotAllowed("france", ["bc1", "bc2"], object_type="Area")
        message = str(exception)
        assert "Area 'france'" in message
        assert "bc1" in message
        assert "bc2" in message
        assert "more..." not in message

    def test_many_binding_constraints(self) -> None:
        # Beyond the display limit, the message is truncated with a "more..." marker.
        many_ids = [f"bc{i}" for i in range(1, 50)]
        exception = ReferencedObjectDeletionNotAllowed("france", many_ids, object_type="Area")
        message = str(exception)
        assert "Area 'france'" in message
        assert "bc1" in message
        assert "bc2" in message
        assert "more..." in message
Loading
Loading