fix(tags): resolve issue with study.additional_data.patch attribute reading #1944

Merged (2 commits) on Feb 23, 2024
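This PR guards the read of `study.additional_data.patch` in `PatchService.get()`: dereferencing `.patch` when a study has no `additional_data` could raise an `AttributeError`. Below is a minimal sketch of the guard pattern the fix applies, using throwaway stand-in classes rather than the project's `RawStudy`/`StudyAdditionalData`/`Patch` models:

import json
from dataclasses import dataclass
from typing import Optional


@dataclass
class AdditionalData:  # stand-in for StudyAdditionalData
    patch: Optional[str] = None


@dataclass
class Study:  # stand-in for RawStudy / VariantStudy
    additional_data: Optional[AdditionalData] = None


def read_patch(study: Study) -> dict:
    # Guard both levels: the study may have no additional_data at all,
    # and additional_data.patch itself is an optional JSON string.
    if study.additional_data is not None and study.additional_data.patch:
        return json.loads(study.additional_data.patch or "{}")
    return {}  # fall back to an empty patch


assert read_patch(Study()) == {}  # no additional_data: no AttributeError
assert read_patch(Study(AdditionalData('{"a": 1}'))) == {"a": 1}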
31 changes: 18 additions & 13 deletions antarest/study/storage/patch_service.py
@@ -1,42 +1,47 @@
-import logging
+import json
+import typing as t
 from pathlib import Path
-from typing import Optional, Union
 
 from antarest.study.model import Patch, PatchOutputs, RawStudy, StudyAdditionalData
 from antarest.study.repository import StudyMetadataRepository
 from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
 from antarest.study.storage.variantstudy.model.dbmodel import VariantStudy
 
-logger = logging.getLogger(__name__)
+PATCH_JSON = "patch.json"
 
 
 class PatchService:
-    def __init__(self, repository: Optional[StudyMetadataRepository] = None):
+    """
+    Handle patch file ("patch.json") for a RawStudy or VariantStudy
+    """
+
+    def __init__(self, repository: t.Optional[StudyMetadataRepository] = None):
         self.repository = repository
 
-    def get(self, study: Union[RawStudy, VariantStudy], get_from_file: bool = False) -> Patch:
-        if not get_from_file:
+    def get(self, study: t.Union[RawStudy, VariantStudy], get_from_file: bool = False) -> Patch:
+        if not get_from_file and study.additional_data is not None:
             # the `study.additional_data.patch` field is optional
-            if patch_data := study.additional_data.patch:
-                return Patch.parse_raw(patch_data)
+            if study.additional_data.patch:
+                patch_obj = json.loads(study.additional_data.patch or "{}")
+                return Patch.parse_obj(patch_obj)
 
         patch = Patch()
-        patch_path = Path(study.path) / "patch.json"
+        patch_path = Path(study.path) / PATCH_JSON
         if patch_path.exists():
             patch = Patch.parse_file(patch_path)
 
         return patch
 
     def get_from_filestudy(self, file_study: FileStudy) -> Patch:
         patch = Patch()
-        patch_path = (Path(file_study.config.study_path)) / "patch.json"
+        patch_path = (Path(file_study.config.study_path)) / PATCH_JSON
         if patch_path.exists():
             patch = Patch.parse_file(patch_path)
         return patch
 
     def set_reference_output(
         self,
-        study: Union[RawStudy, VariantStudy],
+        study: t.Union[RawStudy, VariantStudy],
         output_id: str,
         status: bool = True,
     ) -> None:
@@ -47,12 +52,12 @@ def set_reference_output(
         patch.outputs = PatchOutputs(reference=output_id)
         self.save(study, patch)
 
-    def save(self, study: Union[RawStudy, VariantStudy], patch: Patch) -> None:
+    def save(self, study: t.Union[RawStudy, VariantStudy], patch: Patch) -> None:
         if self.repository:
             study.additional_data = study.additional_data or StudyAdditionalData()
             study.additional_data.patch = patch.json()
             self.repository.save(study)
 
-        patch_path = (Path(study.path)) / "patch.json"
+        patch_path = (Path(study.path)) / PATCH_JSON
         patch_path.parent.mkdir(parents=True, exist_ok=True)
         patch_path.write_text(patch.json())
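Beyond the guard, the rewritten `get()` no longer hands the stored string straight to `Patch.parse_raw`; it decodes the JSON itself, falling back to "{}" for an empty value, and builds the model with `Patch.parse_obj`. A rough sketch of that equivalence, using a throwaway pydantic v1 model rather than the project's `Patch` class:

import json
from typing import Optional

from pydantic import BaseModel


class Tiny(BaseModel):  # throwaway model, only for illustration
    reference: Optional[str] = None


raw = '{"reference": "out-1"}'
assert Tiny.parse_raw(raw) == Tiny.parse_obj(json.loads(raw or "{}"))

# The json.loads(... or "{}") form also tolerates an empty string stored in the column:
assert Tiny.parse_obj(json.loads("" or "{}")) == Tiny()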
20 changes: 10 additions & 10 deletions antarest/study/storage/rawstudy/raw_study_service.py
@@ -1,10 +1,10 @@
 import logging
 import shutil
 import time
+import typing as t
 from datetime import datetime
 from pathlib import Path
 from threading import Thread
-from typing import BinaryIO, List, Optional, Sequence
 from uuid import uuid4
 from zipfile import ZipFile
 
@@ -61,7 +61,7 @@ def __init__(
         )
         self.cleanup_thread.start()
 
-    def update_from_raw_meta(self, metadata: RawStudy, fallback_on_default: Optional[bool] = False) -> None:
+    def update_from_raw_meta(self, metadata: RawStudy, fallback_on_default: t.Optional[bool] = False) -> None:
         """
         Update metadata from study raw metadata
         Args:
@@ -90,7 +90,7 @@ def update_from_raw_meta(self, metadata: RawStudy, fallback_on_default: Optional
         metadata.version = metadata.version or 0
         metadata.created_at = metadata.created_at or datetime.utcnow()
         metadata.updated_at = metadata.updated_at or datetime.utcnow()
-        if not metadata.additional_data:
+        if metadata.additional_data is None:
             metadata.additional_data = StudyAdditionalData()
         metadata.additional_data.patch = metadata.additional_data.patch or Patch().json()
         metadata.additional_data.author = metadata.additional_data.author or "Unknown"
@@ -148,7 +148,7 @@ def get_raw(
         self,
         metadata: RawStudy,
         use_cache: bool = True,
-        output_dir: Optional[Path] = None,
+        output_dir: t.Optional[Path] = None,
     ) -> FileStudy:
         """
         Fetch a study object and its config
@@ -163,7 +163,7 @@ def get_raw(
         study_path = self.get_study_path(metadata)
         return self.study_factory.create_from_fs(study_path, metadata.id, output_dir, use_cache=use_cache)
 
-    def get_synthesis(self, metadata: RawStudy, params: Optional[RequestParameters] = None) -> FileStudyTreeConfigDTO:
+    def get_synthesis(self, metadata: RawStudy, params: t.Optional[RequestParameters] = None) -> FileStudyTreeConfigDTO:
         self._check_study_exists(metadata)
         study_path = self.get_study_path(metadata)
         study = self.study_factory.create_from_fs(study_path, metadata.id)
@@ -206,7 +206,7 @@ def copy(
         self,
         src_meta: RawStudy,
         dest_name: str,
-        groups: Sequence[str],
+        groups: t.Sequence[str],
         with_outputs: bool = False,
     ) -> RawStudy:
         """
@@ -223,7 +223,7 @@
         """
         self._check_study_exists(src_meta)
 
-        if not src_meta.additional_data:
+        if src_meta.additional_data is None:
             additional_data = StudyAdditionalData()
         else:
             additional_data = StudyAdditionalData(
@@ -295,7 +295,7 @@ def delete_output(self, metadata: RawStudy, output_name: str) -> None:
         output_path.unlink(missing_ok=True)
         remove_from_cache(self.cache, metadata.id)
 
-    def import_study(self, metadata: RawStudy, stream: BinaryIO) -> Study:
+    def import_study(self, metadata: RawStudy, stream: t.BinaryIO) -> Study:
         """
         Import study in the directory of the study.
 
@@ -329,7 +329,7 @@ def export_study_flat(
         metadata: RawStudy,
         dst_path: Path,
         outputs: bool = True,
-        output_list_filter: Optional[List[str]] = None,
+        output_list_filter: t.Optional[t.List[str]] = None,
         denormalize: bool = True,
     ) -> None:
         try:
@@ -352,7 +352,7 @@ def export_study_flat(
     def check_errors(
         self,
         metadata: RawStudy,
-    ) -> List[str]:
+    ) -> t.List[str]:
         """
         Check study antares data integrity
         Args:
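The two `additional_data` edits in this file also replace a truthiness test (`if not ...`) with an explicit `is None` check. The difference matters whenever the attribute can hold an object that evaluates as falsy, which a plain `not` would then treat as missing; a self-contained toy illustration (not the project's actual model):

from dataclasses import dataclass, field
from typing import List


@dataclass
class Extra:  # toy stand-in, not StudyAdditionalData
    tags: List[str] = field(default_factory=list)

    def __bool__(self) -> bool:  # an "empty" object is falsy
        return bool(self.tags)


existing = Extra()  # present on the study, but falsy

# Truthiness test wrongly treats the existing object as absent:
if not existing:
    print("would overwrite existing additional data")

# Explicit None test only reacts to a genuinely missing value:
if existing is None:
    print("never printed")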