diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py
index d41d04d9cf..d6c392b9cf 100644
--- a/antarest/core/exceptions.py
+++ b/antarest/core/exceptions.py
@@ -688,6 +688,11 @@ def __init__(self, message: str) -> None:
         super().__init__(HTTPStatus.NOT_FOUND, message)
 
 
+class PathIsAFolderError(HTTPException):
+    def __init__(self, message: str) -> None:
+        super().__init__(HTTPStatus.UNPROCESSABLE_ENTITY, message)
+
+
 class WorkspaceNotFound(HTTPException):
     """
     This will be raised when we try to load a workspace that does not exist
diff --git a/antarest/core/swagger.py b/antarest/core/swagger.py
index 3d1b62441a..047f6c4809 100644
--- a/antarest/core/swagger.py
+++ b/antarest/core/swagger.py
@@ -10,9 +10,10 @@
 #
 # This file is part of the Antares project.
 
-from typing import Any, List, Tuple
+import typing as t
 
 from fastapi import FastAPI
+from fastapi.openapi.models import Example
 from fastapi.routing import APIRoute
 
 sim = "{sim} = simulation index<br/>"
@@ -21,7 +22,7 @@
 attachment = "User-defined file attachment<br/>"
 
 # noinspection SpellCheckingInspection
-urls: List[Tuple[str, str]] = [
+urls: t.List[t.Tuple[str, str]] = [
     ("layers/layers", ""),
     ("settings/generaldata", ""),
     ("output/{sim}/about-the-study/parameters", sim),
@@ -41,7 +42,7 @@
 ]
 
 
-def get_path_examples() -> Any:
+def get_path_examples() -> t.Dict[str, Example]:
     return {url: {"value": url, "description": des} for url, des in urls}
diff --git a/antarest/core/utils/archives.py b/antarest/core/utils/archives.py
index 2356653b78..d12082a835 100644
--- a/antarest/core/utils/archives.py
+++ b/antarest/core/utils/archives.py
@@ -145,7 +145,7 @@ def extract_file_to_tmp_dir(archive_path: Path, inside_archive_path: Path) -> t.
     return path, tmp_dir
 
 
-def read_file_from_archive(archive_path: Path, posix_path: str) -> str:
+def read_original_file_in_archive(archive_path: Path, posix_path: str) -> bytes:
     """
     Read a file from an archive.
 
@@ -154,21 +154,36 @@ def read_file_from_archive(archive_path: Path, posix_path: str) -> str:
         posix_path: path to the file inside the archive.
 
     Returns:
-        The content of the file as a string.
+        The content of the file as `bytes`.
     """
     if archive_path.suffix == ArchiveFormat.ZIP:
         with zipfile.ZipFile(archive_path) as zip_obj:
             with zip_obj.open(posix_path) as f:
-                return f.read().decode("utf-8")
+                return f.read()
     elif archive_path.suffix == ArchiveFormat.SEVEN_ZIP:
         with py7zr.SevenZipFile(archive_path, mode="r") as szf:
-            file_text: str = szf.read([posix_path])[posix_path].read().decode("utf-8")
-            return file_text
+            output: bytes = szf.read([posix_path])[posix_path].read()
+            return output
     else:
         raise ValueError(f"Unsupported {archive_path.suffix} archive format for {archive_path}")
 
 
+def read_file_from_archive(archive_path: Path, posix_path: str) -> str:
+    """
+    Read a file from an archive.
+
+    Args:
+        archive_path: the path to the archive file.
+        posix_path: path to the file inside the archive.
+
+    Returns:
+        The content of the file as a string.
+    """
+
+    return read_original_file_in_archive(archive_path, posix_path).decode("utf-8")
+
+
 def extract_lines_from_archive(root: Path, posix_path: str) -> t.List[str]:
     """
     Extract text lines from various types of files.
diff --git a/antarest/study/business/matrix_management.py b/antarest/study/business/matrix_management.py
index fcdfc07eb8..a80034b603 100644
--- a/antarest/study/business/matrix_management.py
+++ b/antarest/study/business/matrix_management.py
@@ -252,10 +252,7 @@ def update_matrix(
 
         try:
             logger.info(f"Loading matrix data from node '{path}'...")
-            matrix_df = cast(
-                pd.DataFrame,
-                matrix_node.parse(return_dataframe=True),
-            )
+            matrix_df = matrix_node.parse_as_dataframe()
         except ValueError as exc:
             raise MatrixManagerError(f"Cannot parse matrix: {exc}") from exc
diff --git a/antarest/study/common/studystorage.py b/antarest/study/common/studystorage.py
index 906564da35..cedb352051 100644
--- a/antarest/study/common/studystorage.py
+++ b/antarest/study/common/studystorage.py
@@ -20,6 +20,7 @@
 from antarest.study.model import Study, StudyMetadataDTO, StudyMetadataPatchDTO, StudySimResultDTO
 from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfigDTO
 from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
+from antarest.study.storage.rawstudy.model.filesystem.inode import OriginalFile
 
 T = t.TypeVar("T", bound=Study)
 
@@ -56,6 +57,23 @@ def get(
 
         """
 
+    @abstractmethod
+    def get_file(
+        self,
+        metadata: T,
+        url: str = "",
+    ) -> OriginalFile:
+        """
+        Entry point to fetch a specific file inside a study folder.
+
+        Args:
+            metadata: study
+            url: path inside the study to reach the file
+
+        Returns: the file content, suffix, and filename
+
+        """
+
     @abstractmethod
     def exists(self, metadata: T) -> bool:
         """
diff --git a/antarest/study/service.py b/antarest/study/service.py
index 71d4bc3881..3e198addb3 100644
--- a/antarest/study/service.py
+++ b/antarest/study/service.py
@@ -134,7 +134,7 @@
 from antarest.study.storage.matrix_profile import adjust_matrix_columns_index
 from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfigDTO
 from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode
-from antarest.study.storage.rawstudy.model.filesystem.inode import INode
+from antarest.study.storage.rawstudy.model.filesystem.inode import INode, OriginalFile
 from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix
 from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixFrequency
 from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import OutputSeriesMatrix
@@ -451,6 +451,30 @@ def get(
 
         return self.storage_service.get_storage(study).get(study, url, depth, formatted)
 
+    def get_file(
+        self,
+        uuid: str,
+        url: str,
+        params: RequestParameters,
+    ) -> OriginalFile:
+        """
+        Retrieve a file from a study folder.
+
+        Args:
+            uuid: study uuid
+            url: route to follow inside the study structure
+            params: request parameters
+
+        Returns: the file content, suffix, and filename
+
+        """
+        study = self.get_study(uuid)
+        assert_permission(params.user, study, StudyPermissionType.READ)
+
+        output = self.storage_service.get_storage(study).get_file(study, url)
+
+        return output
+
     def aggregate_output_data(
         self,
         uuid: str,
diff --git a/antarest/study/storage/abstract_storage_service.py b/antarest/study/storage/abstract_storage_service.py
index ccaa477673..3b4c002597 100644
--- a/antarest/study/storage/abstract_storage_service.py
+++ b/antarest/study/storage/abstract_storage_service.py
@@ -18,8 +18,6 @@
 from pathlib import Path
 from uuid import uuid4
 
-import py7zr
-
 from antarest.core.config import Config
 from antarest.core.exceptions import BadOutputError, StudyOutputNotFoundError
 from antarest.core.interfaces.cache import CacheConstants, ICache
@@ -45,6 +43,7 @@
 from antarest.study.storage.rawstudy.model.filesystem.config.files import get_playlist
 from antarest.study.storage.rawstudy.model.filesystem.config.model import Simulation
 from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy, StudyFactory
+from antarest.study.storage.rawstudy.model.filesystem.inode import OriginalFile
 from antarest.study.storage.rawstudy.model.helpers import FileStudyHelpers
 from antarest.study.storage.utils import extract_output_name, fix_study_root, remove_from_cache
 
@@ -171,6 +170,30 @@ def get(
         del study
         return data
 
+    def get_file(
+        self,
+        metadata: T,
+        url: str = "",
+        use_cache: bool = True,
+    ) -> OriginalFile:
+        """
+        Entry point to fetch a file inside a study folder.
+        Args:
+            metadata: study
+            url: path inside the study to reach the file
+            use_cache: indicate if the cache must be used
+
+        Returns: the file content with its suffix and name
+
+        """
+        self._check_study_exists(metadata)
+        study = self.get_raw(metadata, use_cache)
+        parts = [item for item in url.split("/") if item]
+
+        file_node = study.tree.get_node(parts)
+
+        return file_node.get_file_content()
+
     def get_study_sim_result(
         self,
         study: T,
diff --git a/antarest/study/storage/rawstudy/model/filesystem/folder_node.py b/antarest/study/storage/rawstudy/model/filesystem/folder_node.py
index 58b42a1388..974c2acdfa 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/folder_node.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/folder_node.py
@@ -14,11 +14,11 @@
 import typing as t
 from abc import ABC, abstractmethod
 
-from antarest.core.exceptions import ChildNotFoundError
+from antarest.core.exceptions import ChildNotFoundError, PathIsAFolderError
 from antarest.core.model import JSON, SUB_JSON
 from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
 from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer
-from antarest.study.storage.rawstudy.model.filesystem.inode import TREE, INode
+from antarest.study.storage.rawstudy.model.filesystem.inode import TREE, INode, OriginalFile
 
 
 class FilterError(Exception):
@@ -216,3 +216,7 @@ def extract_child(self, children: TREE, url: t.List[str]) -> t.Tuple[t.List[str]
         if not isinstance(children[name], child_class):
             raise FilterError("Filter selection has different classes")
         return names, sub_url
+
+    def get_file_content(self) -> OriginalFile:
+        relative_path = self.config.path.relative_to(self.config.study_path).as_posix()
+        raise PathIsAFolderError(f"Node at {relative_path} is a folder node.")
diff --git a/antarest/study/storage/rawstudy/model/filesystem/inode.py b/antarest/study/storage/rawstudy/model/filesystem/inode.py
index 4b1046162a..d910234f03 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/inode.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/inode.py
@@ -11,11 +11,12 @@
 # This file is part of the Antares project.
 
 from abc import ABC, abstractmethod
+from dataclasses import dataclass
 from pathlib import Path
 from typing import Any, Dict, Generic, List, Optional, Tuple, TypeVar
 
 from antarest.core.exceptions import WritingInsideZippedFileException
-from antarest.core.utils.archives import extract_file_to_tmp_dir
+from antarest.core.utils.archives import extract_file_to_tmp_dir, read_original_file_in_archive
 from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
 
 G = TypeVar("G")
@@ -23,6 +24,13 @@
 V = TypeVar("V")
 
 
+@dataclass
+class OriginalFile:
+    suffix: str
+    content: bytes
+    filename: str
+
+
 class INode(ABC, Generic[G, S, V]):
     """
     Abstract tree element, have to be implemented to create hub or left.
@@ -124,6 +132,21 @@ def denormalize(self) -> None:
         """
         raise NotImplementedError()
 
+    def get_file_content(self) -> OriginalFile:
+        suffix = self.config.path.suffix
+        filename = self.config.path.name
+        if self.config.archive_path:
+            content = read_original_file_in_archive(
+                self.config.archive_path,
+                self.get_relative_path_inside_archive(self.config.archive_path),
+            )
+            return OriginalFile(suffix=suffix, filename=filename, content=content)
+        else:
+            return OriginalFile(content=self.config.path.read_bytes(), suffix=suffix, filename=filename)
+
+    def get_relative_path_inside_archive(self, archive_path: Path) -> str:
+        return self.config.path.relative_to(archive_path.parent / self.config.study_id).as_posix()
+
     def _assert_url_end(self, url: Optional[List[str]] = None) -> None:
         """
         Raise error if elements remain in url
diff --git a/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py b/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py
index 2662cde82b..296f3efc13 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py
@@ -9,7 +9,6 @@
 # SPDX-License-Identifier: MPL-2.0
 #
 # This file is part of the Antares project.
-
 import typing as t
 from abc import ABC, abstractmethod
 from dataclasses import dataclass
diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py
index 2efb5a3f05..ec9d04a2a9 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py
@@ -9,11 +9,11 @@
 # SPDX-License-Identifier: MPL-2.0
 #
 # This file is part of the Antares project.
-
+import io
 import logging
 import shutil
+import typing as t
 from pathlib import Path
-from typing import Any, List, Optional, Union, cast
 
 import numpy as np
 import pandas as pd
@@ -22,10 +22,12 @@
 
 from antarest.core.exceptions import ChildNotFoundError
 from antarest.core.model import JSON
+from antarest.core.utils.archives import read_original_file_in_archive
 from antarest.core.utils.utils import StopWatch
 from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
 from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer
-from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixFrequency, MatrixNode
+from antarest.study.storage.rawstudy.model.filesystem.inode import OriginalFile
+from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixFrequency, MatrixNode, dump_dataframe
 
 logger = logging.getLogger(__name__)
 
@@ -40,8 +42,8 @@ def __init__(
         context: ContextServer,
         config: FileStudyTreeConfig,
         freq: MatrixFrequency = MatrixFrequency.HOURLY,
-        nb_columns: Optional[int] = None,
-        default_empty: Optional[npt.NDArray[np.float64]] = None,
+        nb_columns: t.Optional[int] = None,
+        default_empty: t.Optional[npt.NDArray[np.float64]] = None,
     ):
         super().__init__(context=context, config=config, freq=freq)
         self.nb_columns = nb_columns
@@ -52,21 +54,15 @@ def __init__(
         self.default_empty = np.copy(default_empty)
         self.default_empty.flags.writeable = True
 
-    def parse(
-        self,
-        file_path: Optional[Path] = None,
-        tmp_dir: Any = None,
-        return_dataframe: bool = False,
-    ) -> Union[JSON, pd.DataFrame]:
+    def parse_as_dataframe(self, file_path: t.Optional[Path] = None) -> pd.DataFrame:
         file_path = file_path or self.config.path
         try:
-            # sourcery skip: extract-method
             stopwatch = StopWatch()
             link_path = self.get_link_path()
             if link_path.exists():
                 link = link_path.read_text()
                 matrix_json = self.context.resolver.resolve(link)
-                matrix_json = cast(JSON, matrix_json)
+                matrix_json = t.cast(JSON, matrix_json)
                 matrix: pd.DataFrame = pd.DataFrame(**matrix_json)
             else:
                 try:
@@ -83,29 +79,29 @@
                 study_id = self.config.study_id
                 relpath = file_path.relative_to(self.config.study_path).as_posix()
                 raise ChildNotFoundError(f"File '{relpath}' not found in the study '{study_id}'") from e
-
             stopwatch.log_elapsed(lambda x: logger.info(f"Matrix parsed in {x}s"))
             final_matrix = matrix.dropna(how="any", axis=1)
-            if return_dataframe:
-                return final_matrix
-
-            data = cast(JSON, final_matrix.to_dict(orient="split"))
-            stopwatch.log_elapsed(lambda x: logger.info(f"Matrix to dict in {x}s"))
-
-            return data
+            return final_matrix
         except EmptyDataError:
             logger.warning(f"Empty file found when parsing {file_path}")
-            matrix = pd.DataFrame()
+            final_matrix = pd.DataFrame()
             if self.default_empty is not None:
-                matrix = pd.DataFrame(self.default_empty)
-            return matrix if return_dataframe else cast(JSON, matrix.to_dict(orient="split"))
+                final_matrix = pd.DataFrame(self.default_empty)
+            return final_matrix
+
+    def parse_as_json(self, file_path: t.Optional[Path] = None) -> JSON:
+        df = self.parse_as_dataframe(file_path)
+        stopwatch = StopWatch()
+        data = t.cast(JSON, df.to_dict(orient="split"))
+        stopwatch.log_elapsed(lambda x: logger.info(f"Matrix to dict in {x}s"))
+        return data
 
     def check_errors(
         self,
         data: JSON,
-        url: Optional[List[str]] = None,
+        url: t.Optional[t.List[str]] = None,
         raising: bool = False,
-    ) -> List[str]:
+    ) -> t.List[str]:
         self._assert_url_end(url)
 
         errors = []
@@ -131,3 +127,22 @@ def copy_file(self, target: str) -> None:
         target_path = self.config.path.parent.joinpath(f"{target}{''.join(self._infer_path().suffixes)}")
         target_path.unlink(missing_ok=True)
         shutil.copy(self._infer_path(), target_path)
+
+    def get_file_content(self) -> OriginalFile:
+        suffix = self.config.path.suffix
+        filename = self.config.path.name
+        if self.config.archive_path:
+            content = read_original_file_in_archive(
+                self.config.archive_path, self.get_relative_path_inside_archive(self.config.archive_path)
+            )
+        elif self.get_link_path().is_file():
+            target_path = self.config.path.with_suffix(".txt")
+            buffer = io.BytesIO()
+            df = self.parse_as_dataframe()
+            dump_dataframe(df, buffer, None)
+            content = buffer.getvalue()
+            suffix = target_path.suffix
+            filename = target_path.name
+        else:
+            content = self.config.path.read_bytes()
+        return OriginalFile(content=content, suffix=suffix, filename=filename)
diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/matrix.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/matrix.py
index 427631427a..6af089a8a5 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/matrix/matrix.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/matrix.py
@@ -9,12 +9,12 @@
 # SPDX-License-Identifier: MPL-2.0
 #
 # This file is part of the Antares project.
-
+import io
 import logging
 from abc import ABC, abstractmethod
 from enum import StrEnum
 from pathlib import Path
-from typing import Any, List, Optional, Union, cast
+from typing import List, Optional, Union, cast
 
 import pandas as pd
 
@@ -41,12 +41,12 @@ class MatrixFrequency(StrEnum):
     HOURLY = "hourly"
 
 
-def dump_dataframe(df: pd.DataFrame, path: Path, float_format: Optional[str] = "%.6f") -> None:
-    if df.empty:
-        path.write_bytes(b"")
+def dump_dataframe(df: pd.DataFrame, path_or_buf: Path | io.BytesIO, float_format: Optional[str] = "%.6f") -> None:
+    if df.empty and isinstance(path_or_buf, Path):
+        path_or_buf.write_bytes(b"")
     else:
         df.to_csv(
-            path,
+            path_or_buf,
             sep="\t",
             header=False,
             index=False,
@@ -87,7 +87,7 @@ def normalize(self) -> None:
         if self.get_link_path().exists() or self.config.archive_path:
             return
 
-        matrix = self.parse()
+        matrix = self.parse_as_json()
 
         if "data" in matrix:
             data = cast(List[List[float]], matrix["data"])
@@ -131,17 +131,12 @@ def load(
                 tmp_dir.cleanup()
             return b""
 
-        return cast(JSON, self.parse(file_path, tmp_dir))
+        return self.parse_as_json(file_path)
 
     @abstractmethod
-    def parse(
-        self,
-        file_path: Optional[Path] = None,
-        tmp_dir: Any = None,
-        return_dataframe: bool = False,
-    ) -> Union[JSON, pd.DataFrame]:
+    def parse_as_json(self, file_path: Optional[Path] = None) -> JSON:
         """
-        Parse the matrix content
+        Parse the matrix content and return it as a JSON object
         """
         raise NotImplementedError()
diff --git a/antarest/study/storage/variantstudy/model/command/generate_thermal_cluster_timeseries.py b/antarest/study/storage/variantstudy/model/command/generate_thermal_cluster_timeseries.py
index ff2935d30e..e6e4bf17a1 100644
--- a/antarest/study/storage/variantstudy/model/command/generate_thermal_cluster_timeseries.py
+++ b/antarest/study/storage/variantstudy/model/command/generate_thermal_cluster_timeseries.py
@@ -25,6 +25,7 @@
 from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, FileStudyTreeConfig
 from antarest.study.storage.rawstudy.model.filesystem.config.thermal import LocalTSGenerationBehavior
 from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
+from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix
 from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import dump_dataframe
 from antarest.study.storage.utils import TS_GEN_PREFIX, TS_GEN_SUFFIX
 from antarest.study.storage.variantstudy.model.command.common import CommandName, CommandOutput
@@ -36,8 +37,6 @@
 
 MODULATION_CAPACITY_COLUMN = 2
-FO_RATE_COLUMN = 2
-PO_RATE_COLUMN = 3
 
 
 class GenerateThermalClusterTimeSeries(ICommand):
@@ -94,15 +93,19 @@ def _build_timeseries(
         # 7- Build the cluster
         url = ["input", "thermal", "prepro", area_id, thermal.id.lower(), "modulation"]
         matrix = study_data.tree.get_node(url)
-        matrix_df = matrix.parse(return_dataframe=True)  # type: ignore
+        assert isinstance(matrix, InputSeriesMatrix)
+        matrix_df = matrix.parse_as_dataframe()
         modulation_capacity = matrix_df[MODULATION_CAPACITY_COLUMN].to_numpy()
         url = ["input", "thermal", "prepro", area_id, thermal.id.lower(), "data"]
         matrix = study_data.tree.get_node(url)
-        matrix_df = matrix.parse(return_dataframe=True)  # type: ignore
-        fo_duration, po_duration, fo_rate, po_rate, npo_min, npo_max = [
-            np.array(matrix_df[i], dtype=float if i in [FO_RATE_COLUMN, PO_RATE_COLUMN] else int)
-            for i in matrix_df.columns
-        ]
+        assert isinstance(matrix, InputSeriesMatrix)
+        matrix_df = matrix.parse_as_dataframe()
+        fo_duration = np.array(matrix_df[0], dtype=int)
+        po_duration = np.array(matrix_df[1], dtype=int)
+        fo_rate = np.array(matrix_df[2], dtype=float)
+        po_rate = np.array(matrix_df[3], dtype=float)
+        npo_min = np.array(matrix_df[4], dtype=int)
+        npo_max = np.array(matrix_df[5], dtype=int)
         generation_params = OutageGenerationParameters(
             unit_count=thermal.unit_count,
             fo_law=ProbabilityLaw(thermal.law_forced.value.upper()),
diff --git a/antarest/study/storage/variantstudy/variant_study_service.py b/antarest/study/storage/variantstudy/variant_study_service.py
index 37493be684..75154dae95 100644
--- a/antarest/study/storage/variantstudy/variant_study_service.py
+++ b/antarest/study/storage/variantstudy/variant_study_service.py
@@ -57,6 +57,7 @@
 from antarest.study.storage.patch_service import PatchService
 from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig, FileStudyTreeConfigDTO
 from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy, StudyFactory
+from antarest.study.storage.rawstudy.model.filesystem.inode import OriginalFile
 from antarest.study.storage.rawstudy.raw_study_service import RawStudyService
 from antarest.study.storage.utils import assert_permission, export_study_flat, is_managed, remove_from_cache
 from antarest.study.storage.variantstudy.business.utils import transform_command_to_dto
@@ -560,6 +561,29 @@ def get(
             use_cache=use_cache,
         )
 
+    def get_file(
+        self,
+        metadata: VariantStudy,
+        url: str = "",
+        use_cache: bool = True,
+    ) -> OriginalFile:
+        """
+        Entry point to fetch a file inside a study folder.
+        Args:
+            metadata: study
+            url: path inside the study to reach the file
+            use_cache: indicate if the cache should be used to fetch the study tree
+
+        Returns: the file content, suffix, and filename
+        """
+        self._safe_generation(metadata, timeout=600)
+        self.repository.refresh(metadata)
+        return super().get_file(
+            metadata=metadata,
+            url=url,
+            use_cache=use_cache,
+        )
+
     def create_variant_study(self, uuid: str, name: str, params: RequestParameters) -> VariantStudy:
         """
         Create a new variant study.
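Reviewer note: the retrieval path introduced here is `StudyService.get_file` -> storage service `get_file` ->
`INode.get_file_content`, which returns the new `OriginalFile` dataclass. A hedged sketch of what a node hands
back for a plain on-disk file — the field values below are invented for illustration:

    from antarest.study.storage.rawstudy.model.filesystem.inode import OriginalFile

    # For an on-disk file the node simply reads the bytes; for an archived study
    # it goes through read_original_file_in_archive() instead.
    file = OriginalFile(suffix=".antares", filename="study.antares", content=b"[antares]\nversion = 880\n")
    assert file.content.decode("utf-8").startswith("[antares]")

Folder nodes are the one exception: `FolderNode.get_file_content` raises `PathIsAFolderError` (HTTP 422)
instead of returning an `OriginalFile`.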
diff --git a/antarest/study/web/raw_studies_blueprint.py b/antarest/study/web/raw_studies_blueprint.py
index fcab2738fd..730402a9ee 100644
--- a/antarest/study/web/raw_studies_blueprint.py
+++ b/antarest/study/web/raw_studies_blueprint.py
@@ -18,7 +18,7 @@
 from pathlib import Path, PurePosixPath
 
 from fastapi import APIRouter, Body, Depends, File, HTTPException
-from fastapi.params import Param, Query
+from fastapi.params import Query
 from starlette.responses import FileResponse, JSONResponse, PlainTextResponse, Response, StreamingResponse
 
 from antarest.core.config import Config
@@ -75,9 +75,14 @@
     ".txt": ("text/plain", "utf-8"),
     # (JSON)
     ".json": ("application/json", "utf-8"),
+    # (INI file)
+    ".ini": ("text/plain", "utf-8"),
+    # (Antares file)
+    ".antares": ("text/plain", "utf-8"),
 }
 
 DEFAULT_EXPORT_FORMAT = Query(TableExportFormat.CSV, alias="format", description="Export format", title="Export Format")
+PATH_TYPE = t.Annotated[str, Query(openapi_examples=get_path_examples())]
 
 
 def _split_comma_separated_values(value: str, *, default: t.Sequence[str] = ()) -> t.Sequence[str]:
@@ -110,9 +115,9 @@ def create_raw_study_routes(
         tags=[APITag.study_raw_data],
         summary="Retrieve Raw Data from Study: JSON, Text, or File Attachment",
     )
-    def get_study(
+    def get_study_data(
         uuid: str,
-        path: str = Param("/", examples=get_path_examples()),  # type: ignore
+        path: PATH_TYPE = "/",
         depth: int = 3,
         formatted: bool = True,
         current_user: JWTUser = Depends(auth.get_current_user),
@@ -186,6 +191,43 @@ def get_study(
         json_response = to_json(output)
         return Response(content=json_response, media_type="application/json")
 
+    @bp.get(
+        "/studies/{uuid}/raw/original-file",
+        tags=[APITag.study_raw_data],
+        summary="Retrieve a raw file from a study folder in its original format",
+    )
+    def get_study_file(
+        uuid: str,
+        path: PATH_TYPE = "/",
+        current_user: JWTUser = Depends(auth.get_current_user),
+    ) -> t.Any:
+        """
+        Fetches a file in its original format from a study folder.
+
+        Parameters:
+        - `uuid`: The UUID of the study.
+        - `path`: The path to the file to fetch.
+
+        Returns the fetched file in its original format.
+ """ + logger.info( + f"📘 Fetching file at {path} from study {uuid}", + extra={"user": current_user.id}, + ) + parameters = RequestParameters(user=current_user) + original_file = study_service.get_file(uuid, path, params=parameters) + filename = original_file.filename + output = original_file.content + suffix = original_file.suffix + headers = { + "Content-Disposition": f"attachment; filename={filename}", + } + + # Guess the suffix form the filename suffix + content_type, _ = CONTENT_TYPES.get(suffix, (None, None)) + media_type = content_type or "application/octet-stream" + return Response(content=output, media_type=media_type, headers=headers) + @bp.delete( "/studies/{uuid}/raw", tags=[APITag.study_raw_data], @@ -194,7 +236,14 @@ def get_study( ) def delete_file( uuid: str, - path: str = Param("/", examples=["user/wind_solar/synthesis_windSolar.xlsx"]), # type: ignore + path: t.Annotated[ + str, + Query( + openapi_examples={ + "user/wind_solar/synthesis_windSolar.xlsx": {"value": "user/wind_solar/synthesis_windSolar.xlsx"} + }, + ), + ] = "/", current_user: JWTUser = Depends(auth.get_current_user), ) -> t.Any: uuid = sanitize_uuid(uuid) @@ -481,7 +530,7 @@ def aggregate_links_raw_data__all( ) def edit_study( uuid: str, - path: str = Param("/", examples=get_path_examples()), # type: ignore + path: PATH_TYPE = "/", data: SUB_JSON = Body(default=""), current_user: JWTUser = Depends(auth.get_current_user), ) -> None: @@ -510,7 +559,7 @@ def edit_study( ) def replace_study_file( uuid: str, - path: str = Param("/", examples=get_path_examples()), # type: ignore + path: PATH_TYPE = "/", file: bytes = File(default=None), create_missing: bool = Query( False, diff --git a/tests/integration/raw_studies_blueprint/test_fetch_raw_data.py b/tests/integration/raw_studies_blueprint/test_fetch_raw_data.py index 58749c0b09..e30e929a0b 100644 --- a/tests/integration/raw_studies_blueprint/test_fetch_raw_data.py +++ b/tests/integration/raw_studies_blueprint/test_fetch_raw_data.py @@ -62,9 +62,9 @@ class TestFetchRawData: """ @pytest.mark.parametrize("study_type", ["raw", "variant"]) - def test_get_study(self, client: TestClient, user_access_token: str, internal_study_id: str, study_type: str): + def test_get_study_data(self, client: TestClient, user_access_token: str, internal_study_id: str, study_type: str): """ - Test the `get_study` endpoint for fetching raw data from a study. + Test the `get_study_data` endpoint for fetching raw data from a study. This test retrieves raw data from a study identified by a UUID and checks if the returned data matches the expected data. @@ -297,206 +297,340 @@ def test_get_study(self, client: TestClient, user_access_token: str, internal_st res = client.get(raw_url, params={"path": path, "depth": depth}) assert res.status_code == 200, f"Error for path={path} and depth={depth}" + @pytest.mark.parametrize("study_type", ["raw", "variant"]) + def test_delete_raw( + self, client: TestClient, user_access_token: str, internal_study_id: str, study_type: str + ) -> None: + # ============================= + # SET UP + # ============================= + client.headers = {"Authorization": f"Bearer {user_access_token}"} + + if study_type == "variant": + # Copies the study, to convert it into a managed one. 
+            res = client.post(
+                f"/v1/studies/{internal_study_id}/copy",
+                headers={"Authorization": f"Bearer {user_access_token}"},
+                params={"dest": "default", "with_outputs": False, "use_task": False},
+            )
+            assert res.status_code == 201
+            parent_id = res.json()
+            res = client.post(f"/v1/studies/{parent_id}/variants", params={"name": "variant 1"})
+            internal_study_id = res.json()
+
+        # =============================
+        #  NOMINAL CASES
+        # =============================
+
+        content = io.BytesIO(b"This is the end!")
+        file_1_path = "user/file_1.txt"
+        file_2_path = "user/folder/file_2.txt"
+        file_3_path = "user/folder_2/file_3.txt"
+        for f in [file_1_path, file_2_path, file_3_path]:
+            # Creates a file / folder inside user folder.
+            res = client.put(
+                f"/v1/studies/{internal_study_id}/raw",
+                params={"path": f, "create_missing": True},
+                files={"file": content},
+            )
+            assert res.status_code == 204, res.json()
+
+            # Deletes the file / folder
+            if f == file_2_path:
+                f = "user/folder"
+            res = client.delete(f"/v1/studies/{internal_study_id}/raw?path={f}")
+            assert res.status_code == 200
+            # Asserts it doesn't exist anymore
+            res = client.get(f"/v1/studies/{internal_study_id}/raw?path={f}")
+            assert res.status_code == 404
+            assert "not a child of" in res.json()["description"]
+
+            # checks debug view
+            res = client.get(f"/v1/studies/{internal_study_id}/raw?path=&depth=-1")
+            assert res.status_code == 200
+            tree = res.json()["user"]
+            if f == file_3_path:
+                # asserts the folder that wasn't deleted is still here.
+                assert list(tree.keys()) == ["expansion", "folder_2"]
+                assert tree["folder_2"] == {}
+            else:
+                # asserts deleted files cannot be seen inside the debug view
+                assert list(tree.keys()) == ["expansion"]
+
+        # =============================
+        #  ERRORS
+        # =============================
+
+        # try to delete expansion folder
+        res = client.delete(f"/v1/studies/{internal_study_id}/raw?path=/user/expansion")
+        expected_msg = "you are not allowed to delete this resource"
+        _check_endpoint_response(study_type, res, client, internal_study_id, expected_msg, "ResourceDeletionNotAllowed")
+
+        # try to delete a file which isn't inside the 'User' folder
+        res = client.delete(f"/v1/studies/{internal_study_id}/raw?path=/input/thermal")
+        expected_msg = "the given path isn't inside the 'User' folder"
+        assert res.status_code == 403
+        assert res.json()["exception"] == "ResourceDeletionNotAllowed"
+        assert expected_msg in res.json()["description"]
+
+        # With a path that doesn't exist
+        res = client.delete(f"/v1/studies/{internal_study_id}/raw?path=user/fake_folder/fake_file.txt")
+        expected_msg = "the given path doesn't exist"
+        _check_endpoint_response(study_type, res, client, internal_study_id, expected_msg, "ResourceDeletionNotAllowed")
+
+    @pytest.mark.parametrize("study_type", ["raw", "variant"])
+    def test_create_folder(
+        self, client: TestClient, user_access_token: str, internal_study_id: str, study_type: str
+    ) -> None:
+        client.headers = {"Authorization": f"Bearer {user_access_token}"}
+
+        if study_type == "variant":
+            # Copies the study, to convert it into a managed one.
+            res = client.post(
+                f"/v1/studies/{internal_study_id}/copy",
+                headers={"Authorization": f"Bearer {user_access_token}"},
+                params={"dest": "default", "with_outputs": False, "use_task": False},
+            )
+            assert res.status_code == 201
+            parent_id = res.json()
+            res = client.post(f"/v1/studies/{parent_id}/variants", params={"name": "variant 1"})
+            internal_study_id = res.json()
+
+        raw_url = f"/v1/studies/{internal_study_id}/raw"
+
+        # =============================
+        #  NOMINAL CASES
+        # =============================
+        additional_params = {"resource_type": "folder", "create_missing": True}
+
+        res = client.put(raw_url, params={"path": "user/folder_1", **additional_params})
+        assert res.status_code == 204
+
+        # same case with different writing should succeed
+        res = client.put(raw_url, params={"path": "/user/folder_2", **additional_params})
+        assert res.status_code == 204
+
+        # create a folder within a non-existing one
+        res = client.put(raw_url, params={"path": "/user/folder_x/folder_y", **additional_params})
+        assert res.status_code == 204
+
+        # checks debug view to see that folders were created
+        res = client.get(f"/v1/studies/{internal_study_id}/raw?path=&depth=-1")
+        assert res.status_code == 200
+        tree = res.json()["user"]
+        assert list(tree.keys()) == ["expansion", "folder_1", "folder_2", "folder_x"]
+        assert tree["folder_x"] == {"folder_y": {}}
+
+        # =============================
+        #  ERRORS
+        # =============================
+
+        # we can't create a file without specifying a content
+        res = client.put(raw_url, params={"path": "fake_path"})
+        assert res.status_code == 422
+        assert res.json()["description"] == "Argument mismatch: Must give a content to create a file"
+
+        # we can't create a folder and specify a content at the same time
+        res = client.put(raw_url, params={"path": "", "resource_type": "folder"}, files={"file": b"content"})
+        assert res.status_code == 422
+        assert res.json()["description"] == "Argument mismatch: Cannot give a content to create a folder"
+
+        # try to create a folder outside the `user` folder
+        wrong_folder = "input/wrong_folder"
+        expected_msg = f"the given path isn't inside the 'User' folder: {wrong_folder}"
+        res = client.put(raw_url, params={"path": wrong_folder, **additional_params})
+        assert res.status_code == 403
+        assert res.json()["exception"] == "FolderCreationNotAllowed"
+        assert expected_msg in res.json()["description"]
+
+        # try to create a folder inside the `expansion` folder
+        expansion_folder = "user/expansion/wrong_folder"
+        expected_msg = "you are not allowed to create a resource here"
+        res = client.put(raw_url, params={"path": expansion_folder, **additional_params})
+        _check_endpoint_response(study_type, res, client, internal_study_id, expected_msg, "FolderCreationNotAllowed")
+
+        # try to create an already existing folder
+        existing_folder = "user/folder_1"
+        expected_msg = "the given resource already exists"
+        res = client.put(raw_url, params={"path": existing_folder, **additional_params})
+        _check_endpoint_response(study_type, res, client, internal_study_id, expected_msg, "FolderCreationNotAllowed")
+
+    def test_retrieve_from_archive(self, client: TestClient, user_access_token: str) -> None:
+        # client headers
+        client.headers = {"Authorization": f"Bearer {user_access_token}"}
+
+        # create a new study
+        res = client.post("/v1/studies?name=MyStudy")
+        assert res.status_code == 201
 
-@pytest.mark.parametrize("study_type", ["raw", "variant"])
-def test_delete_raw(client: TestClient, user_access_token: str, internal_study_id: str, study_type: str) -> None:
-    # =============================
-    #  SET UP
-    # =============================
-    client.headers = {"Authorization": f"Bearer {user_access_token}"}
 
+        # get the study id
+        study_id = res.json()
 
-    if study_type == "variant":
-        # Copies the study, to convert it into a managed one.
+        # add a new area to the study
         res = client.post(
-            f"/v1/studies/{internal_study_id}/copy",
-            headers={"Authorization": f"Bearer {user_access_token}"},
-            params={"dest": "default", "with_outputs": False, "use_task": False},
+            f"/v1/studies/{study_id}/areas",
+            json={
+                "name": "area 1",
+                "type": "AREA",
+                "metadata": {"country": "FR", "tags": ["a"]},
+            },
         )
-        assert res.status_code == 201
-        parent_id = res.json()
-        res = client.post(f"/v1/studies/{parent_id}/variants", params={"name": "variant 1"})
-        internal_study_id = res.json()
-
-    # =============================
-    #  NOMINAL CASES
-    # =============================
-
-    content = io.BytesIO(b"This is the end!")
-    file_1_path = "user/file_1.txt"
-    file_2_path = "user/folder/file_2.txt"
-    file_3_path = "user/folder_2/file_3.txt"
-    for f in [file_1_path, file_2_path, file_3_path]:
-        # Creates a file / folder inside user folder.
-        res = client.put(
-            f"/v1/studies/{internal_study_id}/raw", params={"path": f, "create_missing": True}, files={"file": content}
+        assert res.status_code == 200, res.json()
+
+        # archive the study
+        res = client.put(f"/v1/studies/{study_id}/archive")
+        assert res.status_code == 200
+        task_id = res.json()
+        wait_for(
+            lambda: client.get(
+                f"/v1/tasks/{task_id}",
+            ).json()["status"]
+            == 3
         )
-        assert res.status_code == 204, res.json()
 
-        # Deletes the file / folder
-        if f == file_2_path:
-            f = "user/folder"
-        res = client.delete(f"/v1/studies/{internal_study_id}/raw?path={f}")
+        # retrieve a `Desktop.ini` file from inside the archive
+        rel_path = "Desktop"
+        res = client.get(
+            f"/v1/studies/{study_id}/raw",
+            params={"path": rel_path, "formatted": True},
+        )
         assert res.status_code == 200
-        # Asserts it doesn't exist anymore
-        res = client.get(f"/v1/studies/{internal_study_id}/raw?path={f}")
-        assert res.status_code == 404
-        assert "not a child of" in res.json()["description"]
 
-        # checks debug view
-        res = client.get(f"/v1/studies/{internal_study_id}/raw?path=&depth=-1")
+        # retrieve a `study.antares` file from inside the archive
+        rel_path = "study"
+        res = client.get(
+            f"/v1/studies/{study_id}/raw",
+            params={"path": rel_path, "formatted": True},
+        )
         assert res.status_code == 200
-        tree = res.json()["user"]
-        if f == file_3_path:
-            # asserts the folder that wasn't deleted is still here.
-            assert list(tree.keys()) == ["expansion", "folder_2"]
-            assert tree["folder_2"] == {}
-        else:
-            # asserts deleted files cannot be seen inside the debug view
-            assert list(tree.keys()) == ["expansion"]
-
-    # =============================
-    #  ERRORS
-    # =============================
-
-    # try to delete expansion folder
-    res = client.delete(f"/v1/studies/{internal_study_id}/raw?path=/user/expansion")
-    expected_msg = "you are not allowed to delete this resource"
-    _check_endpoint_response(study_type, res, client, internal_study_id, expected_msg, "ResourceDeletionNotAllowed")
-
-    # try to delete a file which isn't inside the 'User' folder
-    res = client.delete(f"/v1/studies/{internal_study_id}/raw?path=/input/thermal")
-    expected_msg = "the given path isn't inside the 'User' folder"
-    assert res.status_code == 403
-    assert res.json()["exception"] == "ResourceDeletionNotAllowed"
-    assert expected_msg in res.json()["description"]
-
-    # With a path that doesn't exist
-    res = client.delete(f"/v1/studies/{internal_study_id}/raw?path=user/fake_folder/fake_file.txt")
-    expected_msg = "the given path doesn't exist"
-    _check_endpoint_response(study_type, res, client, internal_study_id, expected_msg, "ResourceDeletionNotAllowed")
-
-
-@pytest.mark.parametrize("study_type", ["raw", "variant"])
-def test_create_folder(client: TestClient, user_access_token: str, internal_study_id: str, study_type: str) -> None:
-    client.headers = {"Authorization": f"Bearer {user_access_token}"}
-
-    if study_type == "variant":
-        # Copies the study, to convert it into a managed one.
-        res = client.post(
-            f"/v1/studies/{internal_study_id}/copy",
-            headers={"Authorization": f"Bearer {user_access_token}"},
-            params={"dest": "default", "with_outputs": False, "use_task": False},
-        )
-        assert res.status_code == 201
-        parent_id = res.json()
-        res = client.post(f"/v1/studies/{parent_id}/variants", params={"name": "variant 1"})
-        internal_study_id = res.json()
-
-    raw_url = f"/v1/studies/{internal_study_id}/raw"
-
-    # =============================
-    #  NOMINAL CASES
-    # =============================
-    additional_params = {"resource_type": "folder", "create_missing": True}
-
-    res = client.put(raw_url, params={"path": "user/folder_1", **additional_params})
-    assert res.status_code == 204
-
-    # same case with different writing should succeed
-    res = client.put(raw_url, params={"path": "/user/folder_2", **additional_params})
-    assert res.status_code == 204
-
-    # create a folder within a non-existing one
-    res = client.put(raw_url, params={"path": "/user/folder_x/folder_y", **additional_params})
-    assert res.status_code == 204
-
-    # checks debug view to see that folders were created
-    res = client.get(f"/v1/studies/{internal_study_id}/raw?path=&depth=-1")
-    assert res.status_code == 200
-    tree = res.json()["user"]
-    assert list(tree.keys()) == ["expansion", "folder_1", "folder_2", "folder_x"]
-    assert tree["folder_x"] == {"folder_y": {}}
-
-    # =============================
-    #  ERRORS
-    # =============================
-
-    # we can't create a file without specifying a content
-    res = client.put(raw_url, params={"path": "fake_path"})
-    assert res.status_code == 422
-    assert res.json()["description"] == "Argument mismatch: Must give a content to create a file"
-
-    # we can't create a folder and specify a content at the same time
-    res = client.put(raw_url, params={"path": "", "resource_type": "folder"}, files={"file": b"content"})
-    assert res.status_code == 422
-    assert res.json()["description"] == "Argument mismatch: Cannot give a content to create a folder"
-
-    # try to create a folder outside `user` folder
-    wrong_folder = "input/wrong_folder"
-    expected_msg = f"the given path isn't inside the 'User' folder: {wrong_folder}"
-    res = client.put(raw_url, params={"path": wrong_folder, **additional_params})
-    assert res.status_code == 403
-    assert res.json()["exception"] == "FolderCreationNotAllowed"
-    assert expected_msg in res.json()["description"]
-
-    # try to create a folder inside the 'expansion` folder
-    expansion_folder = "user/expansion/wrong_folder"
-    expected_msg = "you are not allowed to create a resource here"
-    res = client.put(raw_url, params={"path": expansion_folder, **additional_params})
-    _check_endpoint_response(study_type, res, client, internal_study_id, expected_msg, "FolderCreationNotAllowed")
-
-    # try to create an already existing folder
-    existing_folder = "user/folder_1"
-    expected_msg = "the given resource already exists"
-    res = client.put(raw_url, params={"path": existing_folder, **additional_params})
-    _check_endpoint_response(study_type, res, client, internal_study_id, expected_msg, "FolderCreationNotAllowed")
 
 
+@pytest.mark.integration_test
+class TestFetchOriginalFile:
+    """
+    Check the retrieval of a file from a study folder
+    """
+
+    def test_get_study_file(
+        self,
+        client: TestClient,
+        user_access_token: str,
+        internal_study_id: str,
+    ):
+        """
+        Test the `get_study_file` endpoint for fetching a file in its original format.
+
+        This test retrieves a specific file from a study identified by a UUID and checks
+        that the returned content matches the file on disk.
+
+        The test performs the following steps:
+        1. Copies the user resources in the Study directory.
+        2. Uses the API to download a file from the "user/folder" directory.
+        3. Compares the fetched data with the expected file from disk.
+        4. Checks for cases where errors should be returned.
+ """ + # First copy the user resources in the Study directory + with db(): + study: RawStudy = db.session.get(Study, internal_study_id) + study_dir = pathlib.Path(study.path) + client.headers = {"Authorization": f"Bearer {user_access_token}"} + original_file_url = f"/v1/studies/{internal_study_id}/raw/original-file" + + shutil.copytree( + ASSETS_DIR.joinpath("user"), + study_dir.joinpath("user"), + dirs_exist_ok=True, ) + + # Then, use the API to download the files from the "user/folder" directory + user_folder_dir = study_dir.joinpath("user/folder") + for file_path in user_folder_dir.glob("*.*"): + rel_path = file_path.relative_to(study_dir).as_posix() + res = client.get(original_file_url, params={"path": rel_path}) + assert res.status_code == 200, res.json() + actual = res.content + expected = file_path.read_bytes() + assert actual == expected + + # retrieves a txt file from the outputs + file_path = "output/20201014-1422eco-hello/simulation" + res = client.get(f"/v1/studies/{internal_study_id}/raw/original-file", params={"path": file_path}) + assert res.status_code == 200 + assert res.headers.get("content-disposition") == "attachment; filename=simulation.log" + actual = res.content + expected = study_dir.joinpath(f"{file_path}.log").read_bytes() + assert actual == expected + + # If the extension is unknown, we should have a "binary" content + user_folder_dir = study_dir.joinpath("user/unknown") + for file_path in user_folder_dir.glob("*.*"): + rel_path = file_path.relative_to(study_dir) + res = client.get(original_file_url, params={"path": f"/{rel_path.as_posix()}"}) + assert res.status_code == 200, res.json() + + actual = res.content + expected = file_path.read_bytes() + assert actual == expected + + # If you try to retrieve a file that doesn't exist, we should have a 404 error + res = client.get(original_file_url, params={"path": "user/somewhere/something.txt"}) + assert res.status_code == 404, res.json() + assert res.json() == { + "description": "'somewhere' not a child of User", + "exception": "ChildNotFoundError", + } + + # If you try to retrieve a folder, we should get an Error 422 + res = client.get(original_file_url, params={"path": "user/folder"}) + assert res.status_code == 422, res.json() + assert res.json()["description"] == "Node at user/folder is a folder node." 
+ assert res.json()["exception"] == "PathIsAFolderError" + + @pytest.mark.parametrize("archive", [True, False]) + def test_retrieve_original_files(self, client: TestClient, user_access_token: str, archive: bool) -> None: + # client headers + client.headers = {"Authorization": f"Bearer {user_access_token}"} + + # create a new study + res = client.post("/v1/studies", params={"name": "MyStudy", "version": "880"}) assert res.status_code == 201 - parent_id = res.json() - res = client.post(f"/v1/studies/{parent_id}/variants", params={"name": "variant 1"}) - internal_study_id = res.json() - - raw_url = f"/v1/studies/{internal_study_id}/raw" - - # ============================= - # NOMINAL CASES - # ============================= - additional_params = {"resource_type": "folder", "create_missing": True} - - res = client.put(raw_url, params={"path": "user/folder_1", **additional_params}) - assert res.status_code == 204 - - # same case with different writing should succeed - res = client.put(raw_url, params={"path": "/user/folder_2", **additional_params}) - assert res.status_code == 204 - - # create a folder within a non-existing one - res = client.put(raw_url, params={"path": "/user/folder_x/folder_y", **additional_params}) - assert res.status_code == 204 - - # checks debug view to see that folders were created - res = client.get(f"/v1/studies/{internal_study_id}/raw?path=&depth=-1") - assert res.status_code == 200 - tree = res.json()["user"] - assert list(tree.keys()) == ["expansion", "folder_1", "folder_2", "folder_x"] - assert tree["folder_x"] == {"folder_y": {}} - - # ============================= - # ERRORS - # ============================= - - # we can't create a file without specifying a content - res = client.put(raw_url, params={"path": "fake_path"}) - assert res.status_code == 422 - assert res.json()["description"] == "Argument mismatch: Must give a content to create a file" - - # we can't create a folder and specify a content at the same time - res = client.put(raw_url, params={"path": "", "resource_type": "folder"}, files={"file": b"content"}) - assert res.status_code == 422 - assert res.json()["description"] == "Argument mismatch: Cannot give a content to create a folder" - - # try to create a folder outside `user` folder - wrong_folder = "input/wrong_folder" - expected_msg = f"the given path isn't inside the 'User' folder: {wrong_folder}" - res = client.put(raw_url, params={"path": wrong_folder, **additional_params}) - assert res.status_code == 403 - assert res.json()["exception"] == "FolderCreationNotAllowed" - assert expected_msg in res.json()["description"] - - # try to create a folder inside the 'expansion` folder - expansion_folder = "user/expansion/wrong_folder" - expected_msg = "you are not allowed to create a resource here" - res = client.put(raw_url, params={"path": expansion_folder, **additional_params}) - _check_endpoint_response(study_type, res, client, internal_study_id, expected_msg, "FolderCreationNotAllowed") - - # try to create an already existing folder - existing_folder = "user/folder_1" - expected_msg = "the given resource already exists" - res = client.put(raw_url, params={"path": existing_folder, **additional_params}) - _check_endpoint_response(study_type, res, client, internal_study_id, expected_msg, "FolderCreationNotAllowed") - - -def test_retrieve_from_archive(client: TestClient, user_access_token: str) -> None: - # client headers - client.headers = {"Authorization": f"Bearer {user_access_token}"} - - # create a new study - res = 
client.post("/v1/studies?name=MyStudy") - assert res.status_code == 201 - - # get the study id - study_id = res.json() - - # add a new area to the study - res = client.post( - f"/v1/studies/{study_id}/areas", - json={ - "name": "area 1", - "type": "AREA", - "metadata": {"country": "FR", "tags": ["a"]}, - }, - ) - assert res.status_code == 200, res.json() - - # archive the study - res = client.put(f"/v1/studies/{study_id}/archive") - assert res.status_code == 200 - task_id = res.json() - wait_for( - lambda: client.get( - f"/v1/tasks/{task_id}", - ).json()["status"] - == 3 - ) - - # retrieve a `Desktop.ini` file from inside the archive - rel_path = "Desktop" - res = client.get( - f"/v1/studies/{study_id}/raw", - params={"path": rel_path, "formatted": True}, - ) - assert res.status_code == 200 - - # retrieve a `study.antares` file from inside the archive - rel_path = "study" - res = client.get( - f"/v1/studies/{study_id}/raw", - params={"path": rel_path, "formatted": True}, - ) - assert res.status_code == 200 + study_id = res.json() + + # add a new area to the study + res = client.post( + f"/v1/studies/{study_id}/areas", + json={ + "name": "area 1", + "type": "AREA", + "metadata": {"country": "FR", "tags": ["a"]}, + }, + ) + assert res.status_code == 200, res.json() + + if archive: + # archive the study + res = client.put(f"/v1/studies/{study_id}/archive") + assert res.status_code == 200 + task_id = res.json() + wait_for(lambda: client.get(f"/v1/tasks/{task_id}").json()["status"] == 3) + + # retrieves an `ini` file + res = client.get( + f"/v1/studies/{study_id}/raw/original-file", params={"path": "input/areas/area 1/adequacy_patch"} + ) + assert res.status_code == 200 + assert res.headers.get("content-disposition") == "attachment; filename=adequacy_patch.ini" + assert res.content.strip().decode("utf-8").splitlines() == ["[adequacy-patch]", "adequacy-patch-mode = outside"] + + # retrieves the `study.antares` + res = client.get(f"/v1/studies/{study_id}/raw/original-file", params={"path": "study"}) + assert res.status_code == 200 + assert res.headers.get("content-disposition") == "attachment; filename=study.antares" + assert res.content.strip().decode().splitlines()[:3] == ["[antares]", "version = 880", "caption = MyStudy"] + + # retrieves a matrix (a link towards the matrix store if the study is unarchived, else the real matrix) + res = client.get(f"/v1/studies/{study_id}/raw/original-file", params={"path": "input/load/series/load_area 1"}) + assert res.status_code == 200 + assert res.headers.get("content-disposition") == "attachment; filename=load_area 1.txt" + expected_content = np.zeros((8760, 1)) + actual_content = pd.read_csv(io.BytesIO(res.content), header=None) + assert actual_content.to_numpy().tolist() == expected_content.tolist() diff --git a/tests/storage/repository/filesystem/matrix/test_matrix_node.py b/tests/storage/repository/filesystem/matrix/test_matrix_node.py index decac6e2e0..38dce45486 100644 --- a/tests/storage/repository/filesystem/matrix/test_matrix_node.py +++ b/tests/storage/repository/filesystem/matrix/test_matrix_node.py @@ -11,7 +11,6 @@ # This file is part of the Antares project. 
 
 from pathlib import Path
-from tempfile import TemporaryDirectory
 from typing import List, Optional
 from unittest.mock import Mock
 
@@ -39,23 +38,9 @@ def __init__(self, context: ContextServer, config: FileStudyTreeConfig) -> None:
             freq=MatrixFrequency.ANNUAL,
         )
 
-    def parse(
-        self,
-        file_path: Optional[Path] = None,
-        tmp_dir: Optional[TemporaryDirectory] = None,
-        return_dataframe: bool = False,
-    ) -> JSON:
+    def parse_as_json(self, file_path: Optional[Path] = None) -> JSON:
         return MOCK_MATRIX_JSON
 
-    # def dump(
-    #     self, data: Union[bytes, JSON], url: Optional[List[str]] = None
-    # ) -> None:
-    #     """Dump the matrix data in JSON format to simplify the tests"""
-    #     self.config.path.parent.mkdir(exist_ok=True, parents=True)
-    #     self.config.path.write_text(
-    #         json.dumps(data, indent=2), encoding="utf-8"
-    #     )
-
     def check_errors(self, data: str, url: Optional[List[str]] = None, raising: bool = False) -> List[str]:
         pass  # not used
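Reviewer note: end to end, the new route can be exercised as below — a sketch assuming a locally running
AntaREST server; the base URL, token, and study UUID are placeholders, not values from this PR:

    import requests

    BASE_URL = "http://localhost:8080"  # placeholder deployment address
    HEADERS = {"Authorization": "Bearer <access-token>"}  # placeholder token
    study_id = "<study-uuid>"  # placeholder UUID

    res = requests.get(
        f"{BASE_URL}/v1/studies/{study_id}/raw/original-file",
        params={"path": "settings/generaldata"},
        headers=HEADERS,
    )
    res.raise_for_status()
    # The media type falls back to application/octet-stream for unknown suffixes,
    # and Content-Disposition carries the node's filename (here "generaldata.ini").
    with open("generaldata.ini", "wb") as f:
        f.write(res.content)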