diff --git a/.github/workflows/compatibility.yml b/.github/workflows/compatibility.yml deleted file mode 100644 index 25d2506003..0000000000 --- a/.github/workflows/compatibility.yml +++ /dev/null @@ -1,50 +0,0 @@ -name: compatibility -on: - push: - branches: - - "compatibility" - -jobs: - python-test: - runs-on: ${{ matrix.os }} - strategy: - max-parallel: 9 - matrix: - os: [windows-latest, ubuntu-20.04, macOS-latest] - python-version: [3.8] - - steps: - - name: Checkout github repo - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements-dev.txt - - name: Test with pytest - run: | - pytest - - npm-test: - runs-on: ${{ matrix.os }} - strategy: - max-parallel: 9 - matrix: - os: [windows-latest, ubuntu-20.04, macOS-latest] - node-version: [18.16.1] - steps: - - name: Checkout github repo - uses: actions/checkout@v4 - - name: Set up Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - name: Install dependencies - run: npm install - working-directory: webapp - - name: Build - run: npm run build - working-directory: webapp diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 86eba91403..e91a2a0b65 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -24,7 +24,7 @@ jobs: run: choco install wget --no-progress - name: πŸ’š Set up Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: node-version: 18.16.1 diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 1316ff07ca..fd2a6ddff9 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -38,7 +38,7 @@ jobs: strategy: max-parallel: 9 matrix: - os: [ windows-latest, ubuntu-20.04 ] + os: [windows-latest, ubuntu-20.04] steps: - name: Checkout github repo (+ 
download lfs dependencies) @@ -69,12 +69,12 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ ubuntu-20.04 ] + os: [ubuntu-20.04] steps: - name: Checkout github repo uses: actions/checkout@v4 - name: Set up Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: node-version: 18.16.1 - name: Install dependencies @@ -89,10 +89,13 @@ jobs: - name: Lint run: npm run lint working-directory: webapp + - name: Test + run: npm run test + working-directory: webapp sonarcloud: runs-on: ubuntu-20.04 - needs: [ python-test, npm-test ] + needs: [python-test, npm-test] steps: - uses: actions/checkout@v4 - name: Download python coverage report diff --git a/antarest/__init__.py b/antarest/__init__.py index f4f11de153..75af56d75a 100644 --- a/antarest/__init__.py +++ b/antarest/__init__.py @@ -7,9 +7,9 @@ # Standard project metadata -__version__ = "2.17.2" +__version__ = "2.17.3" __author__ = "RTE, Antares Web Team" -__date__ = "2024-06-19" +__date__ = "2024-07-18" # noinspection SpellCheckingInspection __credits__ = "(c) RΓ©seau de Transport de l’ÉlectricitΓ© (RTE)" diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py index cf1b21d1a4..66859234fc 100644 --- a/antarest/core/exceptions.py +++ b/antarest/core/exceptions.py @@ -325,6 +325,35 @@ def __init__(self, is_variant: bool) -> None: super().__init__(HTTPStatus.EXPECTATION_FAILED, "Upgrade not supported for parent of variants") +class ReferencedObjectDeletionNotAllowed(HTTPException): + """ + Exception raised when a binding constraint is not allowed to be deleted because it references + other objects: areas, links or thermal clusters. + """ + + def __init__(self, object_id: str, binding_ids: t.Sequence[str], *, object_type: str) -> None: + """ + Initialize the exception. + + Args: + object_id: ID of the object that is not allowed to be deleted. + binding_ids: Binding constraints IDs that reference the object. 
+ object_type: Type of the object that is not allowed to be deleted: area, link or thermal cluster. + """ + max_count = 10 + first_bcs_ids = ",\n".join(f"{i}- '{bc}'" for i, bc in enumerate(binding_ids[:max_count], 1)) + and_more = f",\nand {len(binding_ids) - max_count} more..." if len(binding_ids) > max_count else "." + message = ( + f"{object_type} '{object_id}' is not allowed to be deleted, because it is referenced" + f" in the following binding constraints:\n{first_bcs_ids}{and_more}" + ) + super().__init__(HTTPStatus.FORBIDDEN, message) + + def __str__(self) -> str: + """Return a string representation of the exception.""" + return self.detail + + class UnsupportedStudyVersion(HTTPException): def __init__(self, version: str) -> None: super().__init__( diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py index 28881ef874..7f42bb7f59 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -501,13 +501,12 @@ def terms_to_coeffs(terms: t.Sequence[ConstraintTerm]) -> t.Dict[str, t.List[flo :return: A dictionary of term IDs mapped to a list of their coefficients. 
""" coeffs = {} - if terms is not None: - for term in terms: - if term.id and term.weight is not None: - coeffs[term.id] = [term.weight] - if term.offset: - coeffs[term.id].append(term.offset) - return coeffs + for term in terms: + if term.id and term.weight is not None: + coeffs[term.id] = [term.weight] + if term.offset: + coeffs[term.id].append(term.offset) + return coeffs def get_binding_constraint(self, study: Study, bc_id: str) -> ConstraintOutput: """ diff --git a/antarest/study/model.py b/antarest/study/model.py index ad3d1f0fb4..4eff8109ab 100644 --- a/antarest/study/model.py +++ b/antarest/study/model.py @@ -426,13 +426,37 @@ class ExportFormat(str, enum.Enum): TAR_GZ = "application/tar+gz" JSON = "application/json" - @staticmethod - def from_dto(data: str) -> "ExportFormat": - if data == "application/zip": - return ExportFormat.ZIP - if data == "application/tar+gz": - return ExportFormat.TAR_GZ - return ExportFormat.JSON + @classmethod + def from_dto(cls, accept_header: str) -> "ExportFormat": + """ + Convert the "Accept" header to the corresponding content type. + + Args: + accept_header: Value of the "Accept" header. + + Returns: + The corresponding content type: ZIP, TAR_GZ or JSON. + By default, JSON is returned if the format is not recognized. + For instance, if the "Accept" header is "*/*", JSON is returned. + """ + mapping = { + "application/zip": ExportFormat.ZIP, + "application/tar+gz": ExportFormat.TAR_GZ, + "application/json": ExportFormat.JSON, + } + return mapping.get(accept_header, ExportFormat.JSON) + + @property + def suffix(self) -> str: + """ + Returns the file suffix associated with the format: ".zip", ".tar.gz" or ".json". 
+ """ + mapping = { + ExportFormat.ZIP: ".zip", + ExportFormat.TAR_GZ: ".tar.gz", + ExportFormat.JSON: ".json", + } + return mapping[self] class StudyDownloadDTO(BaseModel): diff --git a/antarest/study/service.py b/antarest/study/service.py index 0330992da2..494e7d2f2f 100644 --- a/antarest/study/service.py +++ b/antarest/study/service.py @@ -24,6 +24,7 @@ CommandApplicationError, IncorrectPathError, NotAManagedStudyException, + ReferencedObjectDeletionNotAllowed, StudyDeletionNotAllowed, StudyNotFoundError, StudyTypeUnsupported, @@ -56,7 +57,7 @@ from antarest.study.business.areas.renewable_management import RenewableManager from antarest.study.business.areas.st_storage_management import STStorageManager from antarest.study.business.areas.thermal_management import ThermalManager -from antarest.study.business.binding_constraint_management import BindingConstraintManager +from antarest.study.business.binding_constraint_management import BindingConstraintManager, ConstraintFilters, LinkTerm from antarest.study.business.config_management import ConfigManager from antarest.study.business.correlation_management import CorrelationManager from antarest.study.business.district_manager import DistrictManager @@ -686,7 +687,7 @@ def create_study( id=sid, name=study_name, workspace=DEFAULT_WORKSPACE_NAME, - path=study_path, + path=str(study_path), created_at=datetime.utcnow(), updated_at=datetime.utcnow(), version=version or NEW_DEFAULT_STUDY_VERSION, @@ -1187,13 +1188,13 @@ def download_outputs( """ Download outputs Args: - study_id: study Id - output_id: output id - data: Json parameters - use_task: use task or not - filetype: type of returning file, - tmp_export_file: temporary file (if use_task is false), - params: request parameters + study_id: study ID. + output_id: output ID. + data: Json parameters. + use_task: use task or not. + filetype: type of returning file,. + tmp_export_file: temporary file (if `use_task` is false),. + params: request parameters. 
Returns: CSV content file @@ -1202,35 +1203,33 @@ def download_outputs( study = self.get_study(study_id) assert_permission(params.user, study, StudyPermissionType.READ) self._assert_study_unarchived(study) - logger.info( - f"Study {study_id} output download asked by {params.get_user_id()}", - ) + logger.info(f"Study {study_id} output download asked by {params.get_user_id()}") if use_task: logger.info(f"Exporting {output_id} from study {study_id}") export_name = f"Study filtered output {study.name}/{output_id} export" export_file_download = self.file_transfer_manager.request_download( - f"{study.name}-{study_id}-{output_id}_filtered.{'tar.gz' if filetype == ExportFormat.TAR_GZ else 'zip'}", + f"{study.name}-{study_id}-{output_id}_filtered{filetype.suffix}", export_name, params.user, ) export_path = Path(export_file_download.path) export_id = export_file_download.id - def export_task(notifier: TaskUpdateNotifier) -> TaskResult: + def export_task(_notifier: TaskUpdateNotifier) -> TaskResult: try: - study = self.get_study(study_id) - stopwatch = StopWatch() - matrix = StudyDownloader.build( - self.storage_service.get_storage(study).get_raw(study), + _study = self.get_study(study_id) + _stopwatch = StopWatch() + _matrix = StudyDownloader.build( + self.storage_service.get_storage(_study).get_raw(_study), output_id, data, ) - stopwatch.log_elapsed( + _stopwatch.log_elapsed( lambda x: logger.info(f"Study {study_id} filtered output {output_id} built in {x}s") ) - StudyDownloader.export(matrix, filetype, export_path) - stopwatch.log_elapsed( + StudyDownloader.export(_matrix, filetype, export_path) + _stopwatch.log_elapsed( lambda x: logger.info(f"Study {study_id} filtered output {output_id} exported in {x}s") ) self.file_transfer_manager.set_ready(export_id) @@ -1240,7 +1239,7 @@ def export_task(notifier: TaskUpdateNotifier) -> TaskResult: ) except Exception as e: self.file_transfer_manager.fail(export_id, str(e)) - raise e + raise task_id = self.task_service.add_task( 
export_task, @@ -1265,17 +1264,18 @@ def export_task(notifier: TaskUpdateNotifier) -> TaskResult: stopwatch.log_elapsed( lambda x: logger.info(f"Study {study_id} filtered output {output_id} exported in {x}s") ) - return FileResponse( - tmp_export_file, - headers=( - {"Content-Disposition": "inline"} - if filetype == ExportFormat.JSON - else { - "Content-Disposition": f'attachment; filename="output-{output_id}.{"tar.gz" if filetype == ExportFormat.TAR_GZ else "zip"}' - } - ), - media_type=filetype, - ) + + if filetype == ExportFormat.JSON: + headers = {"Content-Disposition": "inline"} + elif filetype == ExportFormat.TAR_GZ: + headers = {"Content-Disposition": f'attachment; filename="output-{output_id}.tar.gz'} + elif filetype == ExportFormat.ZIP: + headers = {"Content-Disposition": f'attachment; filename="output-{output_id}.zip'} + else: # pragma: no cover + raise NotImplementedError(f"Export format {filetype} is not supported") + + return FileResponse(tmp_export_file, headers=headers, media_type=filetype) + else: json_response = json.dumps( matrix.dict(), @@ -1314,26 +1314,20 @@ def set_sim_reference( params: RequestParameters, ) -> None: """ - Set simulation as the reference output + Set simulation as the reference output. + Args: - study_id: study Id - output_id: output id - status: state of the reference status + study_id: study ID. + output_id: The ID of the output to set as reference. + status: state of the reference status. 
params: request parameters - - Returns: None - """ study = self.get_study(study_id) assert_permission(params.user, study, StudyPermissionType.WRITE) self._assert_study_unarchived(study) logger.info( - "output %s set by user %s as reference (%b) for study %s", - output_id, - params.get_user_id(), - status, - study_id, + f"output {output_id} set by user {params.get_user_id()} as reference ({status}) for study {study_id}" ) self.storage_service.get_storage(study).set_reference_output(study, output_id, status) @@ -1855,9 +1849,27 @@ def update_thermal_cluster_metadata( return self.areas.update_thermal_cluster_metadata(study, area_id, clusters_metadata) def delete_area(self, uuid: str, area_id: str, params: RequestParameters) -> None: + """ + Delete area from study if it is not referenced by a binding constraint, + otherwise raise an HTTP 403 Forbidden error. + + Args: + uuid: The study ID. + area_id: The area ID to delete. + params: The request parameters used to check user permissions. + + Raises: + ReferencedObjectDeletionNotAllowed: If the area is referenced by a binding constraint. + """ study = self.get_study(uuid) assert_permission(params.user, study, StudyPermissionType.WRITE) self._assert_study_unarchived(study) + referencing_binding_constraints = self.binding_constraint_manager.get_binding_constraints( + study, ConstraintFilters(area_name=area_id) + ) + if referencing_binding_constraints: + binding_ids = [bc.id for bc in referencing_binding_constraints] + raise ReferencedObjectDeletionNotAllowed(area_id, binding_ids, object_type="Area") self.areas.delete_area(study, area_id) self.event_bus.push( Event( @@ -1874,9 +1886,29 @@ def delete_link( area_to: str, params: RequestParameters, ) -> None: + """ + Delete link from study if it is not referenced by a binding constraint, + otherwise raise an HTTP 403 Forbidden error. + + Args: + uuid: The study ID. + area_from: The area from which the link starts. + area_to: The area to which the link ends. 
+ params: The request parameters used to check user permissions. + + Raises: + ReferencedObjectDeletionNotAllowed: If the link is referenced by a binding constraint. + """ study = self.get_study(uuid) assert_permission(params.user, study, StudyPermissionType.WRITE) self._assert_study_unarchived(study) + link_id = LinkTerm(area1=area_from, area2=area_to).generate_id() + referencing_binding_constraints = self.binding_constraint_manager.get_binding_constraints( + study, ConstraintFilters(link_id=link_id) + ) + if referencing_binding_constraints: + binding_ids = [bc.id for bc in referencing_binding_constraints] + raise ReferencedObjectDeletionNotAllowed(link_id, binding_ids, object_type="Link") self.links.delete_link(study, area_from, area_to) self.event_bus.push( Event( @@ -2518,3 +2550,26 @@ def get_matrix_with_index_and_header( ) return df_matrix + + def asserts_no_thermal_in_binding_constraints( + self, study: Study, area_id: str, cluster_ids: t.Sequence[str] + ) -> None: + """ + Check that no cluster is referenced in a binding constraint, otherwise raise an HTTP 403 Forbidden error. 
+ + Args: + study: input study for which an update is to be committed + area_id: area ID to be checked + cluster_ids: IDs of the thermal clusters to be checked + + Raises: + ReferencedObjectDeletionNotAllowed: if a cluster is referenced in a binding constraint + """ + + for cluster_id in cluster_ids: + ref_bcs = self.binding_constraint_manager.get_binding_constraints( + study, ConstraintFilters(cluster_id=f"{area_id}.{cluster_id}") + ) + if ref_bcs: + binding_ids = [bc.id for bc in ref_bcs] + raise ReferencedObjectDeletionNotAllowed(cluster_id, binding_ids, object_type="Cluster") diff --git a/antarest/study/storage/rawstudy/model/filesystem/common/area_matrix_list.py b/antarest/study/storage/rawstudy/model/filesystem/common/area_matrix_list.py index 0191b866d1..8bc1b5a497 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/common/area_matrix_list.py +++ b/antarest/study/storage/rawstudy/model/filesystem/common/area_matrix_list.py @@ -6,6 +6,8 @@ from antarest.study.storage.rawstudy.model.filesystem.inode import TREE, INode from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix +TXT_PATTERN = "*.txt" + class AreaMatrixList(FolderNode): """ @@ -60,14 +62,17 @@ def build(self) -> TREE: A dictionary of child nodes, where the key is the matrix file name and the value is the corresponding :class:`InputSeriesMatrix` node. 
""" - children: TREE = { - f"{self.prefix}{area}": self.matrix_class( - self.context, - self.config.next_file(f"{self.prefix}{area}.txt"), - **self.additional_matrix_params, + children: TREE = {} + if self.prefix: # Corresponds to the inputs + files = self.config.area_names() + else: # Corresponds to the outputs + files = [d.with_suffix("").name for d in self.config.path.iterdir()] + + for file in files: + name = f"{self.prefix}{file}" + children[name] = self.matrix_class( + self.context, self.config.next_file(f"{name}.txt"), **self.additional_matrix_params ) - for area in self.config.area_names() - } return children @@ -105,7 +110,7 @@ def build(self) -> TREE: """Builds the folder structure and creates child nodes representing each matrix file.""" return { file.stem: self.matrix_class(self.context, self.config.next_file(file.name)) - for file in self.config.path.glob("*.txt") + for file in self.config.path.glob(TXT_PATTERN) } @@ -124,15 +129,28 @@ def __init__( def build(self) -> TREE: # Note that cluster IDs are case-insensitive, but series IDs are in lower case. # For instance, if your cluster ID is "Base", then the series ID will be "base". - series_ids = map(str.lower, self.config.get_thermal_ids(self.area)) - children: TREE = { - series_id: self.matrix_class(self.context, self.config.next_file(f"{series_id}.txt")) - for series_id in series_ids + series_files = self.config.path.glob(TXT_PATTERN) + return { + series.stem: self.matrix_class(self.context, self.config.next_file(series.name)) for series in series_files } - return children class AreaMultipleMatrixList(FolderNode): + """ + Node representing a folder structure containing multiple matrix files for each area. + + Example of tree structure: + + .. 
code-block:: text + + ts-numbers/thermal + β”œβ”€β”€ at + β”‚ β”œβ”€β”€ cluster_gas.txt + β”‚ └── cluster2_gas.txt + └── be + └── cluster_nuclear.txt + """ + def __init__( self, context: ContextServer, @@ -156,6 +174,7 @@ def __init__( self.matrix_class = matrix_class def build(self) -> TREE: + folders = [d.name for d in self.config.path.iterdir() if d.is_dir()] children: TREE = { area: self.klass( self.context, @@ -163,6 +182,6 @@ def build(self) -> TREE: area, self.matrix_class, ) - for area in self.config.area_names() + for area in folders } return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py index 9be78332f3..4cda0b4027 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py +++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py @@ -11,6 +11,7 @@ from antarest.core.utils.utils import StopWatch from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer +from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixFrequency, MatrixNode logger = logging.getLogger(__name__) @@ -49,23 +50,28 @@ def parse( try: # sourcery skip: extract-method stopwatch = StopWatch() - if self.get_link_path().exists(): - link = self.get_link_path().read_text() + link_path = self.get_link_path() + if link_path.exists(): + link = link_path.read_text() matrix_json = self.context.resolver.resolve(link) matrix_json = cast(JSON, matrix_json) - matrix: pd.DataFrame = pd.DataFrame( - data=matrix_json["data"], - columns=matrix_json["columns"], - index=matrix_json["index"], - ) + matrix: pd.DataFrame = pd.DataFrame(**matrix_json) else: - matrix = pd.read_csv( - 
file_path, - sep="\t", - dtype=float, - header=None, - float_precision="legacy", - ) + try: + matrix = pd.read_csv( + file_path, + sep="\t", + dtype=float, + header=None, + float_precision="legacy", + ) + except FileNotFoundError as e: + # Raise 404 'Not Found' if the TSV file is not found + logger.warning(f"Matrix file'{file_path}' not found") + study_id = self.config.study_id + relpath = file_path.relative_to(self.config.study_path).as_posix() + raise ChildNotFoundError(f"File '{relpath}' not found in the study '{study_id}'") from e + stopwatch.log_elapsed(lambda x: logger.info(f"Matrix parsed in {x}s")) matrix.dropna(how="any", axis=1, inplace=True) if return_dataframe: diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py index 6f82eaab34..dbc3c4385e 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py +++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py @@ -50,6 +50,7 @@ def get_lazy_content( depth: int = -1, expanded: bool = False, ) -> str: + # noinspection SpellCheckingInspection return f"matrixfile://{self.config.path.name}" def parse_dataframe( @@ -58,14 +59,21 @@ def parse_dataframe( tmp_dir: Any = None, ) -> DataFrame: file_path = file_path or self.config.path - df = pd.read_csv( - file_path, - sep="\t", - skiprows=4, - header=[0, 1, 2], - na_values="N/A", - float_precision="legacy", - ) + try: + df = pd.read_csv( + file_path, + sep="\t", + skiprows=4, + header=[0, 1, 2], + na_values="N/A", + float_precision="legacy", + ) + except FileNotFoundError as e: + # Raise 404 'Not Found' if the TSV file is not found + logger.warning(f"Matrix file'{file_path}' not found") + study_id = self.config.study_id + relpath = file_path.relative_to(self.config.study_path).as_posix() + raise ChildNotFoundError(f"File '{relpath}' not found in the study '{study_id}'") from e if 
tmp_dir: tmp_dir.cleanup() diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/load/__init__.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/commons/__init__.py similarity index 100% rename from antarest/study/storage/rawstudy/model/filesystem/root/input/load/__init__.py rename to antarest/study/storage/rawstudy/model/filesystem/root/input/commons/__init__.py diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/commons/prepro_series.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/commons/prepro_series.py new file mode 100644 index 0000000000..b6e8f79c54 --- /dev/null +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/commons/prepro_series.py @@ -0,0 +1,52 @@ +from antarest.study.storage.rawstudy.model.filesystem.common.area_matrix_list import AreaMatrixList +from antarest.study.storage.rawstudy.model.filesystem.common.prepro import InputPrepro +from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig +from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer +from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode +from antarest.study.storage.rawstudy.model.filesystem.inode import TREE +from antarest.study.storage.rawstudy.model.filesystem.matrix.constants import default_scenario_hourly + + +class InputPreproSeries(FolderNode): + def __init__(self, context: ContextServer, config: FileStudyTreeConfig, prefix: str): + """ + Represents a folder structure, which contains a "prepro" and a time series structure. + + Example of tree structure: + + .. 
code-block:: text + + input/load/ + β”œβ”€β”€ prepro + β”‚ β”œβ”€β”€ correlation.ini + β”‚ β”œβ”€β”€ store_in + β”‚ β”‚ β”œβ”€β”€ conversion.txt + β”‚ β”‚ β”œβ”€β”€ data.txt + β”‚ β”‚ β”œβ”€β”€ k.txt + β”‚ β”‚ β”œβ”€β”€ settings.ini + β”‚ β”‚ └── translation.txt + β”‚ └── store_out + β”‚ β”œβ”€β”€ conversion.txt + β”‚ β”œβ”€β”€ data.txt + β”‚ β”œβ”€β”€ k.txt + β”‚ β”œβ”€β”€ settings.ini + β”‚ └── translation.txt + └── series + β”œβ”€β”€ load_store_in.txt + └── load_store_out.txt + """ + + super().__init__(context, config) + self.prefix = prefix + + def build(self) -> TREE: + children: TREE = { + "prepro": InputPrepro(self.context, self.config.next_file("prepro")), + "series": AreaMatrixList( + self.context, + self.config.next_file("series"), + prefix=self.prefix, + additional_matrix_params={"default_empty": default_scenario_hourly}, + ), + } + return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py index 91752b440e..fe22590f62 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py @@ -5,16 +5,14 @@ from antarest.study.storage.rawstudy.model.filesystem.root.input.bindingconstraints.bindingcontraints import ( BindingConstraints, ) +from antarest.study.storage.rawstudy.model.filesystem.root.input.commons.prepro_series import InputPreproSeries from antarest.study.storage.rawstudy.model.filesystem.root.input.hydro.hydro import InputHydro from antarest.study.storage.rawstudy.model.filesystem.root.input.link.link import InputLink -from antarest.study.storage.rawstudy.model.filesystem.root.input.load.load import InputLoad from antarest.study.storage.rawstudy.model.filesystem.root.input.miscgen.miscgen import InputMiscGen from antarest.study.storage.rawstudy.model.filesystem.root.input.renewables.renewable import ClusteredRenewables from 
antarest.study.storage.rawstudy.model.filesystem.root.input.reserves.reserves import InputReserves -from antarest.study.storage.rawstudy.model.filesystem.root.input.solar.solar import InputSolar from antarest.study.storage.rawstudy.model.filesystem.root.input.st_storage.st_storage import InputSTStorage from antarest.study.storage.rawstudy.model.filesystem.root.input.thermal.thermal import InputThermal -from antarest.study.storage.rawstudy.model.filesystem.root.input.wind.wind import InputWind class Input(FolderNode): @@ -31,12 +29,12 @@ def build(self) -> TREE: "bindingconstraints": BindingConstraints(self.context, config.next_file("bindingconstraints")), "hydro": InputHydro(self.context, config.next_file("hydro")), "links": InputLink(self.context, config.next_file("links")), - "load": InputLoad(self.context, config.next_file("load")), + "load": InputPreproSeries(self.context, config.next_file("load"), "load_"), "misc-gen": InputMiscGen(self.context, config.next_file("misc-gen")), "reserves": InputReserves(self.context, config.next_file("reserves")), - "solar": InputSolar(self.context, config.next_file("solar")), + "solar": InputPreproSeries(self.context, config.next_file("solar"), "solar_"), "thermal": InputThermal(self.context, config.next_file("thermal")), - "wind": InputWind(self.context, config.next_file("wind")), + "wind": InputPreproSeries(self.context, config.next_file("wind"), "wind_"), } has_renewables = config.version >= 810 and EnrModelling(config.enr_modelling) == EnrModelling.CLUSTERS diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/link/area/capacities/capacities.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/link/area/capacities/capacities.py index db07796678..ea4dd677c8 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/link/area/capacities/capacities.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/link/area/capacities/capacities.py @@ -12,7 +12,7 @@ def __init__( 
config: FileStudyTreeConfig, area: str, ): - FolderNode.__init__(self, context, config) + super().__init__(context, config) self.area = area def build(self) -> TREE: diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/load/load.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/load/load.py deleted file mode 100644 index 64aa92080b..0000000000 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/load/load.py +++ /dev/null @@ -1,46 +0,0 @@ -from antarest.study.storage.rawstudy.model.filesystem.common.area_matrix_list import AreaMatrixList -from antarest.study.storage.rawstudy.model.filesystem.common.prepro import InputPrepro -from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode -from antarest.study.storage.rawstudy.model.filesystem.inode import TREE -from antarest.study.storage.rawstudy.model.filesystem.matrix.constants import default_scenario_hourly - - -class InputLoad(FolderNode): - """ - Represents a folder structure, which contains a "prepro" and a time series structure. - - Example of tree structure: - - .. 
code-block:: text - - input/load/ - β”œβ”€β”€ prepro - β”‚ β”œβ”€β”€ correlation.ini - β”‚ β”œβ”€β”€ store_in - β”‚ β”‚ β”œβ”€β”€ conversion.txt - β”‚ β”‚ β”œβ”€β”€ data.txt - β”‚ β”‚ β”œβ”€β”€ k.txt - β”‚ β”‚ β”œβ”€β”€ settings.ini - β”‚ β”‚ └── translation.txt - β”‚ └── store_out - β”‚ β”œβ”€β”€ conversion.txt - β”‚ β”œβ”€β”€ data.txt - β”‚ β”œβ”€β”€ k.txt - β”‚ β”œβ”€β”€ settings.ini - β”‚ └── translation.txt - └── series - β”œβ”€β”€ load_store_in.txt - └── load_store_out.txt - """ - - def build(self) -> TREE: - children: TREE = { - "prepro": InputPrepro(self.context, self.config.next_file("prepro")), - "series": AreaMatrixList( - self.context, - self.config.next_file("series"), - prefix="load_", - additional_matrix_params={"default_empty": default_scenario_hourly}, - ), - } - return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/renewables/clusters.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/renewables/clusters.py index 23592eb764..8866a46551 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/renewables/clusters.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/renewables/clusters.py @@ -31,7 +31,7 @@ def __init__( config: FileStudyTreeConfig, area: str, ): - FolderNode.__init__(self, context, config) + super().__init__(context, config) self.area = area def build(self) -> TREE: diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/solar/__init__.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/solar/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/solar/solar.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/solar/solar.py deleted file mode 100644 index 057118b2b3..0000000000 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/solar/solar.py +++ /dev/null @@ -1,46 +0,0 @@ -from 
antarest.study.storage.rawstudy.model.filesystem.common.area_matrix_list import AreaMatrixList -from antarest.study.storage.rawstudy.model.filesystem.common.prepro import InputPrepro -from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode -from antarest.study.storage.rawstudy.model.filesystem.inode import TREE -from antarest.study.storage.rawstudy.model.filesystem.matrix.constants import default_scenario_hourly - - -class InputSolar(FolderNode): - """ - Represents a folder structure, which contains a "prepro" and a time series structure. - - Example of tree structure: - - .. code-block:: text - - input/solar/ - β”œβ”€β”€ prepro - β”‚ β”œβ”€β”€ correlation.ini - β”‚ β”œβ”€β”€ store_in - β”‚ β”‚ β”œβ”€β”€ conversion.txt - β”‚ β”‚ β”œβ”€β”€ data.txt - β”‚ β”‚ β”œβ”€β”€ k.txt - β”‚ β”‚ β”œβ”€β”€ settings.ini - β”‚ β”‚ └── translation.txt - β”‚ └── store_out - β”‚ β”œβ”€β”€ conversion.txt - β”‚ β”œβ”€β”€ data.txt - β”‚ β”œβ”€β”€ k.txt - β”‚ β”œβ”€β”€ settings.ini - β”‚ └── translation.txt - └── series - β”œβ”€β”€ solar_store_in.txt - └── solar_store_out.txt - """ - - def build(self) -> TREE: - children: TREE = { - "prepro": InputPrepro(self.context, self.config.next_file("prepro")), - "series": AreaMatrixList( - self.context, - self.config.next_file("series"), - prefix="solar_", - additional_matrix_params={"default_empty": default_scenario_hourly}, - ), - } - return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/cluster/area/area.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/cluster/area/area.py index 233418aa05..2fed32670c 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/cluster/area/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/cluster/area/area.py @@ -14,7 +14,7 @@ def __init__( config: FileStudyTreeConfig, area: str, ): - FolderNode.__init__(self, context, config) + super().__init__(context, config) 
self.area = area def build(self) -> TREE: diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/prepro/area/area.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/prepro/area/area.py index 8bb88d01ac..afc70243ce 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/prepro/area/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/prepro/area/area.py @@ -14,7 +14,7 @@ def __init__( config: FileStudyTreeConfig, area: str, ): - FolderNode.__init__(self, context, config) + super().__init__(context, config) self.area = area def build(self) -> TREE: diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/series/area/area.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/series/area/area.py index 31efc9a0b5..db10297818 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/series/area/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/thermal/series/area/area.py @@ -14,7 +14,7 @@ def __init__( config: FileStudyTreeConfig, area: str, ): - FolderNode.__init__(self, context, config) + super().__init__(context, config) self.area = area def build(self) -> TREE: diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/wind/__init__.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/wind/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/wind/wind.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/wind/wind.py deleted file mode 100644 index 1727e37523..0000000000 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/wind/wind.py +++ /dev/null @@ -1,46 +0,0 @@ -from antarest.study.storage.rawstudy.model.filesystem.common.area_matrix_list import AreaMatrixList -from antarest.study.storage.rawstudy.model.filesystem.common.prepro import 
InputPrepro -from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode -from antarest.study.storage.rawstudy.model.filesystem.inode import TREE -from antarest.study.storage.rawstudy.model.filesystem.matrix.constants import default_scenario_hourly - - -class InputWind(FolderNode): - """ - Represents a folder structure, which contains a "prepro" and a time series structure. - - Example of tree structure: - - .. code-block:: text - - input/wind/ - β”œβ”€β”€ prepro - β”‚ β”œβ”€β”€ correlation.ini - β”‚ β”œβ”€β”€ store_in - β”‚ β”‚ β”œβ”€β”€ conversion.txt - β”‚ β”‚ β”œβ”€β”€ data.txt - β”‚ β”‚ β”œβ”€β”€ k.txt - β”‚ β”‚ β”œβ”€β”€ settings.ini - β”‚ β”‚ └── translation.txt - β”‚ └── store_out - β”‚ β”œβ”€β”€ conversion.txt - β”‚ β”œβ”€β”€ data.txt - β”‚ β”œβ”€β”€ k.txt - β”‚ β”œβ”€β”€ settings.ini - β”‚ └── translation.txt - └── series - β”œβ”€β”€ wind_store_in.txt - └── wind_store_out.txt - """ - - def build(self) -> TREE: - children: TREE = { - "prepro": InputPrepro(self.context, self.config.next_file("prepro")), - "series": AreaMatrixList( - self.context, - self.config.next_file("series"), - prefix="wind_", - additional_matrix_params={"default_empty": default_scenario_hourly}, - ), - } - return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py index dc5726554d..9aea14533e 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py @@ -12,76 +12,22 @@ def __init__( context: ContextServer, config: FileStudyTreeConfig, area: str, - mc_all: bool = True, ): - FolderNode.__init__(self, context, config) + super().__init__(context, config) self.area = area - self.mc_all = mc_all def build(self) -> TREE: children: TREE = {} - - # filters = 
self.config.get_filters_synthesis(self.area) - # todo get the config related to this output (now this may fail if input has changed since the launch) - freq: MatrixFrequency for freq in MatrixFrequency: - if self.mc_all: - children[f"id-{freq}"] = AreaOutputSeriesMatrix( - self.context, - self.config.next_file(f"id-{freq}.txt"), - freq, - self.area, - ) - - children[f"values-{freq}"] = AreaOutputSeriesMatrix( - self.context, - self.config.next_file(f"values-{freq}.txt"), - freq, - self.area, - ) - - # has_thermal_clusters = len(self.config.get_thermal_ids(self.area)) > 0 - # todo get the config related to this output (now this may fail if input has changed since the launch) - has_thermal_clusters = True - - if has_thermal_clusters: - children[f"details-{freq}"] = AreaOutputSeriesMatrix( - self.context, - self.config.next_file(f"details-{freq}.txt"), - freq, - self.area, - ) - - # has_enr_clusters = self.config.enr_modelling == EnrModelling.CLUSTERS.value and - # len(self.config.get_renewable_ids(self.area)) > 0 - # todo get the config related to this output (now this may fail if input has changed since the launch) - has_enr_clusters = True - - if has_enr_clusters: - children[f"details-res-{freq}"] = AreaOutputSeriesMatrix( - self.context, - self.config.next_file(f"details-res-{freq}.txt"), - freq, - self.area, - ) - - # add condition len(self.config.get_short_term_storage_names(self.area)) > 0 to - # has_short_term_storage boolean - # todo get the config related to this output (now this may fail if input has changed since the launch) - - has_short_term_storage = self.config.version >= 860 - if has_short_term_storage: - children[f"details-STstorage-{freq}"] = AreaOutputSeriesMatrix( - self.context, - self.config.next_file(f"details-STstorage-{freq}.txt"), - freq, - self.area, - ) - - return { - child: children[child] - for child in children - # this takes way too long in zip mode... 
see above todo to prevent needing this - # if cast(AreaOutputSeriesMatrix, children[child]).file_exists() - } + for output_type in ["id", "values", "details", "details-res", "details-STstorage"]: + file_name = f"{output_type}-{freq}.txt" + if (self.config.path / file_name).exists(): + children[f"{output_type}-{freq}"] = AreaOutputSeriesMatrix( + self.context, + self.config.next_file(file_name), + freq, + self.area, + ) + + return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/areas.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/areas.py index ac35597dfa..880a313dd8 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/areas.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/areas.py @@ -15,27 +15,20 @@ def __init__( self, context: ContextServer, config: FileStudyTreeConfig, - mc_all: bool = True, ) -> None: super().__init__(context, config) - self.mc_all = mc_all def build(self) -> TREE: - children: TREE = { - a: Area( - self.context, - self.config.next_file(a), - area=a, - mc_all=self.mc_all, - ) - for a in self.config.area_names() - } + areas = set() + sets = set() + for file in self.config.path.iterdir(): + name = file.stem + if "@" in name: + sets.add(name) + else: + areas.add(name) + children: TREE = {a: Area(self.context, self.config.next_file(a), area=a) for a in areas} - for s in self.config.set_names(): - children[f"@ {s}"] = Set( - self.context, - self.config.next_file(f"@ {s}"), - set=s, - mc_all=self.mc_all, - ) + for s in sets: + children[s] = Set(self.context, self.config.next_file(s), set=s) return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/binding_const.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/binding_const.py index ec1a3e2f63..822c5c8d93 100644 --- 
a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/binding_const.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/binding_const.py @@ -8,21 +8,13 @@ class OutputSimulationBindingConstraintItem(FolderNode): def build(self) -> TREE: - # filters = self.config.get_filters_synthesis(self.area, self.link) - # todo get the config related to this output (now this may fail if input has changed since the launch) - - freq: MatrixFrequency + existing_files = [d.stem.replace("binding-constraints-", "") for d in self.config.path.iterdir()] children: TREE = { f"binding-constraints-{freq}": BindingConstraintOutputSeriesMatrix( self.context, self.config.next_file(f"binding-constraints-{freq}.txt"), - freq, + MatrixFrequency(freq), ) - for freq in MatrixFrequency - } - return { - child: children[child] - for child in children - # this takes way too long... see above todo to prevent needing this - # if cast(LinkOutputSeriesMatrix, children[child]).file_exists() + for freq in existing_files } + return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/link.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/link.py index 5fc82dc57f..7b1fdd3eb1 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/link.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/link.py @@ -13,40 +13,24 @@ def __init__( config: FileStudyTreeConfig, area: str, link: str, - mc_all: bool = True, ): - FolderNode.__init__(self, context, config) + super().__init__(context, config) self.area = area self.link = link - self.mc_all = mc_all def build(self) -> TREE: children: TREE = {} - - # filters = self.config.get_filters_synthesis(self.area, self.link) - # todo get the config related to this output (now this may fail if input has changed since the launch) - freq: 
MatrixFrequency for freq in MatrixFrequency: - children[f"values-{freq}"] = LinkOutputSeriesMatrix( - self.context, - self.config.next_file(f"values-{freq}.txt"), - freq, - self.area, - self.link, - ) - if self.mc_all: - children[f"id-{freq}"] = LinkOutputSeriesMatrix( - self.context, - self.config.next_file(f"id-{freq}.txt"), - freq, - self.area, - self.link, - ) + for output_type in ["id", "values"]: + file_name = f"{output_type}-{freq}.txt" + if (self.config.path / file_name).exists(): + children[f"{output_type}-{freq}"] = LinkOutputSeriesMatrix( + self.context, + self.config.next_file(file_name), + freq, + self.area, + self.link, + ) - return { - child: children[child] - for child in children - # this takes way too long... see above todo to prevent needing this - # if cast(LinkOutputSeriesMatrix, children[child]).file_exists() - } + return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/links.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/links.py index 57cc0af3e9..1a74e89241 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/links.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/links.py @@ -1,3 +1,5 @@ +import typing as t + from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode @@ -12,23 +14,19 @@ def __init__( self, context: ContextServer, config: FileStudyTreeConfig, - area: str, - mc_all: bool, + area_from: str, + link_names: t.List[str], ): - FolderNode.__init__(self, context, config) - self.area = area - self.mc_all = mc_all + super().__init__(context, config) + self.area_from = area_from + self.link_names = link_names def build(self) -> TREE: children: TREE = {} - 
for link in self.config.get_links(self.area): - name = f"{self.area} - {link}" + for link_name in self.link_names: + link = link_name.split(" - ")[1] children[link] = OutputSimulationLinkItem( - self.context, - self.config.next_file(name), - self.area, - link, - mc_all=self.mc_all, + self.context, self.config.next_file(link_name), self.area_from, link ) return children @@ -38,15 +36,16 @@ def __init__( self, context: ContextServer, config: FileStudyTreeConfig, - mc_all: bool = True, ): super().__init__(context, config) - self.mc_all = mc_all def build(self) -> TREE: children: TREE = {} - - for area in self.config.area_names(): - children[area] = _OutputSimulationModeMcAllLinksBis(self.context, self.config, area, self.mc_all) + links = [d.stem for d in self.config.path.iterdir()] + areas: t.Dict[str, t.List[str]] = {} + for link in links: + areas.setdefault(link.split(" - ")[0], []).append(link) + for area_from, link_names in areas.items(): + children[area_from] = _OutputSimulationModeMcAllLinksBis(self.context, self.config, area_from, link_names) return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/set.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/set.py index 7b32fc5844..367edb8442 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/set.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/set.py @@ -12,38 +12,22 @@ def __init__( context: ContextServer, config: FileStudyTreeConfig, set: str, - mc_all: bool = True, ): - FolderNode.__init__(self, context, config) + super().__init__(context, config) self.set = set - self.mc_all = mc_all def build(self) -> TREE: children: TREE = {} - - # filters = self.config.get_filters_synthesis(self.set) - # todo get the config related to this output (now this may fail if input has changed since the launch) - freq: MatrixFrequency for freq in 
MatrixFrequency: - if self.mc_all: - children[f"id-{freq.value}"] = AreaOutputSeriesMatrix( - self.context, - self.config.next_file(f"id-{freq.value}.txt"), - freq, - self.set, - ) - - children[f"values-{freq.value}"] = AreaOutputSeriesMatrix( - self.context, - self.config.next_file(f"values-{freq.value}.txt"), - freq, - self.set, - ) + for output_type in ["id", "values"]: + file_name = f"{output_type}-{freq}.txt" + if (self.config.path / file_name).exists(): + children[f"{output_type}-{freq}"] = AreaOutputSeriesMatrix( + self.context, + self.config.next_file(file_name), + freq, + self.set, + ) - return { - child: children[child] - for child in children - # this takes way too long... see above todo to prevent needing this - # if cast(AreaOutputSeriesMatrix, children[child]).file_exists() - } + return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcall/mcall.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/utils.py similarity index 58% rename from antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcall/mcall.py rename to antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/utils.py index 212c40cf9d..c59a5b22b6 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcall/mcall.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/utils.py @@ -13,15 +13,20 @@ OutputSimulationModeMcAllGrid, ) +OUTPUT_MAPPING = { + "areas": OutputSimulationAreas, + "grid": OutputSimulationModeMcAllGrid, + "links": OutputSimulationLinks, + "binding_constraints": OutputSimulationBindingConstraintItem, +} -class OutputSimulationModeMcAll(FolderNode): + +class OutputSimulationModeCommon(FolderNode): def build(self) -> TREE: - children: TREE = { - "areas": OutputSimulationAreas(self.context, self.config.next_file("areas")), - "grid": 
OutputSimulationModeMcAllGrid(self.context, self.config.next_file("grid")), - "links": OutputSimulationLinks(self.context, self.config.next_file("links")), - "binding_constraints": OutputSimulationBindingConstraintItem( - self.context, self.config.next_file("binding_constraints") - ), - } + if not self.config.output_path: + return {} + children: TREE = {} + for key, simulation_class in OUTPUT_MAPPING.items(): + if (self.config.path / key).exists(): + children[key] = simulation_class(self.context, self.config.next_file(key)) return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/economy.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/economy.py index 15d648c205..c1a14e5527 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/economy.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/economy.py @@ -2,8 +2,8 @@ from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode from antarest.study.storage.rawstudy.model.filesystem.inode import TREE -from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.mcall.mcall import ( - OutputSimulationModeMcAll, +from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.common.utils import ( + OutputSimulationModeCommon, ) from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.mcind.mcind import ( OutputSimulationModeMcInd, @@ -17,17 +17,16 @@ def __init__( config: FileStudyTreeConfig, simulation: Simulation, ): - FolderNode.__init__(self, context, config) + super().__init__(context, config) self.simulation = simulation def build(self) -> TREE: children: TREE = {} - if self.simulation.by_year: children["mc-ind"] = OutputSimulationModeMcInd( self.context, self.config.next_file("mc-ind"), 
self.simulation ) if self.simulation.synthesis: - children["mc-all"] = OutputSimulationModeMcAll(self.context, self.config.next_file("mc-all")) + children["mc-all"] = OutputSimulationModeCommon(self.context, self.config.next_file("mc-all")) return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcall/grid.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcall/grid.py index e52e0991c3..6778e09b7d 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcall/grid.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcall/grid.py @@ -5,10 +5,8 @@ class OutputSimulationModeMcAllGrid(FolderNode): def build(self) -> TREE: - children: TREE = { - "areas": RawFileNode(self.context, self.config.next_file("areas.txt")), - "digest": RawFileNode(self.context, self.config.next_file("digest.txt")), - "links": RawFileNode(self.context, self.config.next_file("links.txt")), - "thermal": RawFileNode(self.context, self.config.next_file("thermal.txt")), - } + files = [d.stem for d in self.config.path.iterdir()] + children: TREE = {} + for file in files: + children[file] = RawFileNode(self.context, self.config.next_file(f"{file}.txt")) return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcind/mcind.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcind/mcind.py index 8007eddd91..a69eaf5497 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcind/mcind.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcind/mcind.py @@ -2,8 +2,8 @@ from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode from antarest.study.storage.rawstudy.model.filesystem.inode import TREE -from 
antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.mcind.scn import ( - OutputSimulationModeMcIndScn, +from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.common.utils import ( + OutputSimulationModeCommon, ) @@ -14,14 +14,12 @@ def __init__( config: FileStudyTreeConfig, simulation: Simulation, ): - FolderNode.__init__(self, context, config) + super().__init__(context, config) self.simulation = simulation def build(self) -> TREE: children: TREE = { - str("{:05d}".format(scn)): OutputSimulationModeMcIndScn( - self.context, self.config.next_file("{:05d}".format(scn)) - ) + f"{scn:05d}": OutputSimulationModeCommon(self.context, self.config.next_file(f"{scn:05d}")) for scn in self.simulation.playlist or range(1, self.simulation.nbyears + 1) } return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcind/scn.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcind/scn.py deleted file mode 100644 index ec9caa989e..0000000000 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcind/scn.py +++ /dev/null @@ -1,23 +0,0 @@ -from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode -from antarest.study.storage.rawstudy.model.filesystem.inode import TREE -from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.common.areas import ( - OutputSimulationAreas, -) -from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.common.binding_const import ( - OutputSimulationBindingConstraintItem, -) -from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.common.links import ( - OutputSimulationLinks, -) - - -class OutputSimulationModeMcIndScn(FolderNode): - def build(self) -> TREE: - children: TREE = { - "areas": OutputSimulationAreas(self.context, self.config.next_file("areas"), mc_all=False), - "links": 
OutputSimulationLinks(self.context, self.config.next_file("links"), mc_all=False), - "binding_constraints": OutputSimulationBindingConstraintItem( - self.context, self.config.next_file("binding_constraints") - ), - } - return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/simulation.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/simulation.py index 1dfef19de5..629a8c937d 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/simulation.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/simulation.py @@ -1,6 +1,7 @@ from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig, Simulation from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode +from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode from antarest.study.storage.rawstudy.model.filesystem.inode import TREE from antarest.study.storage.rawstudy.model.filesystem.raw_file_node import RawFileNode from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.about.about import OutputSimulationAbout @@ -26,7 +27,7 @@ def __init__( config: FileStudyTreeConfig, simulation: Simulation, ): - FolderNode.__init__(self, context, config) + super().__init__(context, config) self.simulation = simulation def build(self) -> TREE: @@ -39,16 +40,19 @@ def build(self) -> TREE: } if not self.simulation.error: - children["annualSystemCost"] = RawFileNode(self.context, self.config.next_file("annualSystemCost.txt")) - children["checkIntegrity"] = RawFileNode(self.context, self.config.next_file("checkIntegrity.txt")) - children["simulation-comments"] = RawFileNode( - self.context, self.config.next_file("simulation-comments.txt") - ) + for file in ["annualSystemCost", "checkIntegrity", "simulation-comments"]: 
+ file_name = f"{file}.txt" + if (self.config.path / file_name).exists(): + children[file] = RawFileNode(self.context, self.config.next_file(file_name)) - if self.config.store_new_set: + file_name = "execution_info" + if (self.config.path / f"{file_name}.ini").exists(): + children[file_name] = IniFileNode(self.context, self.config.next_file(f"{file_name}.ini")) + + if (self.config.path / "ts-numbers").exists(): children["ts-numbers"] = OutputSimulationTsNumbers(self.context, self.config.next_file("ts-numbers")) - if self.config.archive_input_series: + if (self.config.path / "ts-generator").exists(): children["ts-generator"] = OutputSimulationTsGenerator( self.context, self.config.next_file("ts-generator") ) diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/ts_generator/ts_generator.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/ts_generator/ts_generator.py index 7721c001e2..8757debfd0 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/ts_generator/ts_generator.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/ts_generator/ts_generator.py @@ -56,17 +56,18 @@ def build(self) -> TREE: class OutputSimulationTsGenerator(FolderNode): def build(self) -> TREE: - children: TREE = { - "hydro": OutputSimulationTsGeneratorCustomMatrixList( + children: TREE = {} + for output_type in ["load", "solar", "wind"]: + if (self.config.path / output_type).exists(): + children[output_type] = OutputSimulationTsGeneratorSimpleMatrixList( + self.context, self.config.next_file(output_type) + ) + if (self.config.path / "hydro").exists(): + children["hydro"] = OutputSimulationTsGeneratorCustomMatrixList( self.context, self.config.next_file("hydro"), HydroMatrixList - ), - "load": OutputSimulationTsGeneratorSimpleMatrixList(self.context, self.config.next_file("load")), - "solar": OutputSimulationTsGeneratorSimpleMatrixList(self.context, 
self.config.next_file("solar")), - "wind": OutputSimulationTsGeneratorSimpleMatrixList(self.context, self.config.next_file("wind")), - "thermal": OutputSimulationTsGeneratorCustomMatrixList( - self.context, - self.config.next_file("thermal"), - ThermalMatrixList, - ), - } + ) + if (self.config.path / "thermal").exists(): + children["thermal"] = OutputSimulationTsGeneratorCustomMatrixList( + self.context, self.config.next_file("thermal"), ThermalMatrixList + ) return children diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/ts_numbers/ts_numbers.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/ts_numbers/ts_numbers.py index 2a74e256f9..cc458102ca 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/ts_numbers/ts_numbers.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/ts_numbers/ts_numbers.py @@ -57,38 +57,23 @@ class OutputSimulationTsNumbers(FolderNode): """ def build(self) -> TREE: - children: TREE = { - "hydro": AreaMatrixList( - self.context, - self.config.next_file("hydro"), - matrix_class=TsNumbersVector, - ), - "load": AreaMatrixList( - self.context, - self.config.next_file("load"), - matrix_class=TsNumbersVector, - ), - "solar": AreaMatrixList( - self.context, - self.config.next_file("solar"), - matrix_class=TsNumbersVector, - ), - "wind": AreaMatrixList( - self.context, - self.config.next_file("wind"), - matrix_class=TsNumbersVector, - ), - "thermal": AreaMultipleMatrixList( + children: TREE = {} + for output_type in ["hydro", "load", "solar", "wind"]: + if (self.config.path / output_type).exists(): + children[output_type] = AreaMatrixList( + self.context, + self.config.next_file(output_type), + matrix_class=TsNumbersVector, + ) + if (self.config.path / "bindingconstraints").exists(): + children["bindingconstraints"] = BindingConstraintMatrixList( + self.context, self.config.next_file("bindingconstraints"), 
matrix_class=TsNumbersVector + ) + if (self.config.path / "thermal").exists(): + children["thermal"] = AreaMultipleMatrixList( self.context, self.config.next_file("thermal"), ThermalMatrixList, TsNumbersVector, - ), - } - if self.config.version >= 870: - children["bindingconstraints"] = BindingConstraintMatrixList( - self.context, - self.config.next_file("bindingconstraints"), - matrix_class=TsNumbersVector, ) return children diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index e84029ff8f..43d71400d5 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -2185,6 +2185,7 @@ def delete_thermal_clusters( ) request_params = RequestParameters(user=current_user) study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, request_params) + study_service.asserts_no_thermal_in_binding_constraints(study, area_id, cluster_ids) study_service.thermal_manager.delete_clusters(study, area_id, cluster_ids) @bp.get( diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index bdabc067c7..031fddd8b1 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -1,6 +1,33 @@ Antares Web Changelog ===================== +v2.17.3 (2024-07-18) +-------------------- + +### Features + +* **api**: do not allow areas, links or thermals deletion when referenced in a binding constraint [`2061`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2061) +* **outputs**: build outputs tree based on filesystem [`2064`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2064) +* **api-raw**: raise a 404 Not Found error when a resource is missing in the study [`2078`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2078) + +### Bug Fixes + +* **ui-clusters**: improve cell number values accuracy by using rounding instead of truncating [`2087`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2087) +* **ui-commons**: prompt from Form displayed on dialog validation 
[`2089`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2089) + +### Continuous integration + +* **workflows**: update Actions in GitHub workflows [`2080`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2080) + +### Documentation + +* user-guide: updating Binding Constraints Commands documentation and metadata for search [`2082`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2082) +* user-guide: improve the user guide and add "How to Create a New Study?" topic [`2081`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2081) + + +**Full Changelog**: https://github.com/AntaresSimulatorTeam/AntaREST/compare/v2.17.2...v2.17.3 + + v2.17.2 (2024-06-19) -------------------- diff --git a/docs/assets/media/how-to/study-create-button.png b/docs/assets/media/how-to/study-create-button.png new file mode 100644 index 0000000000..fb149407cc Binary files /dev/null and b/docs/assets/media/how-to/study-create-button.png differ diff --git a/docs/assets/media/how-to/study-create-form.png b/docs/assets/media/how-to/study-create-form.png new file mode 100644 index 0000000000..725cb504d6 Binary files /dev/null and b/docs/assets/media/how-to/study-create-form.png differ diff --git a/docs/assets/media/how-to/study-create-other-options.png b/docs/assets/media/how-to/study-create-other-options.png new file mode 100644 index 0000000000..1d0db58c26 Binary files /dev/null and b/docs/assets/media/how-to/study-create-other-options.png differ diff --git a/docs/assets/media/how-to/study-creation-edit-properties.png b/docs/assets/media/how-to/study-creation-edit-properties.png new file mode 100644 index 0000000000..80b4a3e4d0 Binary files /dev/null and b/docs/assets/media/how-to/study-creation-edit-properties.png differ diff --git a/docs/assets/media/how-to/study-creation-move-to-subfolder.png b/docs/assets/media/how-to/study-creation-move-to-subfolder.png new file mode 100644 index 0000000000..db5f64e66e Binary files /dev/null and
b/docs/assets/media/how-to/study-creation-move-to-subfolder.png differ diff --git a/docs/how-to/studies-create.md b/docs/how-to/studies-create.md new file mode 100644 index 0000000000..8123872433 --- /dev/null +++ b/docs/how-to/studies-create.md @@ -0,0 +1,160 @@ +--- +title: How to Create a New Study? +author: Laurent LAPORTE +date: 2024-07-03 +tags: + + - tutorial + - guide + - create + - version + - new + - study management + - workspace +--- + +# How to Create a New Study? + +Creating a new study in Antares Web is a straightforward process designed to facilitate users in setting up their energy +system simulations. +This guide will walk you through the steps to create a new study, from initial setup to finalization. + +## Open the "Create Study" dialog + +Navigate to the "Studies" pages to display the list of studies. +Click on the "Create" button to open the "Create Study" dialog box. + +![List of studies](../assets/media/how-to/study-create-button.png) + +## Fill in Study Properties + +In the "Create Study" dialog, you will be prompted to enter details about your study, such as: + +- **Study Name**: Give your study a unique and descriptive name. +- **Version**: Select the version you wish to use, by default, the latest version is selected. +- **Permission**: + - **Public Mode**: Select this option if you want to share your study with other users (it is recommended to + select "READ" permission). + - **Group**: Choose the groups you want to share your study with. +- **Metadata**: + - **Tag**: Add tags to your study to help categorize and organize it (enter each tag, and press Ctrl+Enter to add + it). + +![Create Study Form](../assets/media/how-to/study-create-form.png) + +Validate the form by clicking the "Save" button. + +Your study is automatically saved in the `default` folder, which is the default folder of managed studies.
+ +## Customize Study Properties + +After creating your study, you can customize it further by moving it into a subfolder, changing its permission, or adding +metadata. + +Select the `default` folder to display the list of studies in this folder. +Click on the "More options" button of your study to access the "Properties" and "Move" options. + +![Other Options Menu](../assets/media/how-to/study-create-other-options.png) + +### Move Study in a Subfolder + +To move your study into a subfolder, click on the "Move" option. +The "Move Study" dialog opens, allowing you to select the destination folder. + +Enter the name of the subfolder and validate by clicking the "Save" button. + +![Move Study Dialog](../assets/media/how-to/study-creation-move-to-subfolder.png) + +If the subfolder does not exist, it will be created automatically. + +### Edit the Study Properties + +To edit the study properties, click on the "Properties" option. +The "Edit Study" dialog opens, allowing you to modify the study name, permission, and metadata: + +- **Study Name**: Give your study a unique and descriptive name. +- **Permission**: + - **Public Mode**: Select this option if you want to share your study with other users (it is recommended to + select "READ" permission). + - **Group**: Choose the groups you want to share your study with. +- **Metadata**: + - **Tag**: Add tags to your study to help categorize and organize it (enter each tag, and press Ctrl+Enter to add + it). + +![Edit Study Dialog](../assets/media/how-to/study-creation-edit-properties.png) + +Validate the form by clicking the "Save" button. + +> **NOTE:** It is not possible to modify the version of a study after its creation, +> but you can still upgrade the version of the study. +> Refer to the [How to Upgrade a Study?](studies-upgrade.md) section for more information. 
+ +## Creating a Study Using the API Endpoint + +The following Python script demonstrates how to create a study using the API endpoint `POST /v1/studies`: + +You need to provide the following parameters: + +- `name`: name of the study +- `version`: version of the study +- `groups`: list of groups to which the study will be assigned (optional) + +Make sure you have the correct API URL and a valid authentication token. + +```python +import httpx # or requests + +URL = "https://antares-web/api" +TOKEN = "" + +study = { + "name": "My New Study", + "version": 860, + "groups": "group1,group2" +} + +with httpx.Client(verify=False, headers={"Authorization": f"Bearer {TOKEN}"}) as client: + res = client.post(f"{URL}/v1/studies", params=study) + +res.raise_for_status() +study_id = res.json() +``` + +The script above creates a new study named `My New Study` with version `860` and assigns it to the groups `group1` +and `group2`. + +Here is a breakdown of what each part of the code does: + +1. `import httpx`: This line imports the `httpx` library, which is used for making HTTP requests in Python. + Alternatively, the `requests` library can be used instead of `httpx` for the same purpose. + +2. `URL = "https://antares-web/api"`: This line sets the URL to which the POST request will be made. + You need to provide the right URL according to your own Antares Web server. + +3. `TOKEN = ""`: This line sets the authentication token that will be used in the request. + You should replace `` with your actual authentication token. + +4. The `study = {...}` block defines the properties of the study to be created. + +5. The `with httpx.Client(verify=False, headers=...) as client:` block creates an HTTP client. + The `verify=False` argument is used to disable SSL certificate verification. + The `headers={"Authorization": f"Bearer {TOKEN}"}` argument sets the authentication token. + +6. The `res = client.post(f"{URL}/v1/studies", params=study)` line sends a POST request to create the study. 
+ The `params=study` argument sends the study properties as query parameters of the request URL. + +7. `res.raise_for_status()` checks if the response from the server indicates an error. + If an error is detected, it raises an exception. + +8. `study_id = res.json()` parses the response from the server, assuming it is in JSON format, + and assigns it to the variable `study_id`. + +See also: + +- ["User account & api tokens"](../user-guide/1-interface.md#user-account-and-api-tokens) in the user guide. + +## See also + +- [How to Upgrade a Study?](studies-upgrade.md) -- Upgrade a study to a recent version +- [How to Import a Compressed Study?](studies-import.md) -- Import a study from a compressed file +- How to Run a study simulation? - Run a simulation on a study diff --git a/docs/how-to/studies-import.md b/docs/how-to/studies-import.md index 4434131437..153b139fdc 100644 --- a/docs/how-to/studies-import.md +++ b/docs/how-to/studies-import.md @@ -4,9 +4,14 @@ author: Laurent LAPORTE date: 2023-10-25 tags: - - import - - zip - - 7z + - tutorial + - guide + - import + - zip + - 7z + - compressed + - study management + - workspace --- @@ -56,10 +61,20 @@ The import dialog box will appear. Click the "Browse" button to select the compr You can also drag and drop the compressed file into the dialog box. -Once imported, you can see the study in the list of studies. Select the "default" workspace to view the imported study. You can also search for the study by name using the search input. +Once imported, you can see the study in the list of studies. Select the "default" workspace to view the imported study. +You can also search for the study by name using the search input. ![studies-import-studies-list.png](../assets/media/how-to/studies-import-studies-list.png) +> **NOTE:** The properties of the imported study can be modified by clicking on the "More options" button and +> selecting "Properties". You can change the study name, permission, and metadata. 
+> Refer to the [Customize Study Properties](studies-create.md#customize-study-properties) paragraph +> for more information. + +> **NOTE:** It is not possible to modify the version of a study after its import, +> but you can still upgrade the version of the study. +> Refer to the [How to Upgrade a Study?](studies-upgrade.md) section for more information. + ## Importing a Study Using the API Endpoint The following Python script demonstrates how to import a study using the API endpoint `POST /v1/studies/_import`: @@ -78,21 +93,20 @@ URL = "https://antares-web/api" TOKEN = "" with open("perso/new_study.zip", mode="rb") as fd: - with httpx.Client(verify=False) as client: + with httpx.Client(verify=False, headers={"Authorization": f"Bearer {TOKEN}"}) as client: res = client.post( f"{URL}/v1/studies/_import", - headers={"Authorization": f"Bearer {TOKEN}"}, files={"study": fd}, params={"groups": "foo,bar"}, ) res.raise_for_status() -study_uuid = res.json() +study_id = res.json() ``` The script above imports the compressed file `perso/new_study.zip` and assigns the study to the groups `foo` and `bar`. -Here's a breakdown of what each part of the code does: +Here is a breakdown of what each part of the code does: 1. `import httpx`: This line imports the `httpx` library, which is used for making HTTP requests in Python. Alternatively, the `requests` library can be used instead of `httpx` for the same purpose. @@ -105,19 +119,26 @@ Here is a breakdown of what each part of the code does: 4. The `with open("perso/new_study.zip", mode="rb") as fd:` block opens the specified compressed file in binary mode. -5. The `with httpx.Client(verify=False) as client:` block creates an HTTP client. +5. The `with httpx.Client(verify=False, headers=...) as client:` block creates an HTTP client. The `verify=False` argument is used to disable SSL certificate verification. + The `headers={"Authorization": f"Bearer {TOKEN}"}` argument sets the authentication token. 6. 
`res = client.post(...)` makes a POST request to the specified URL with the provided parameters. It sends the file contents, sets the headers with the authentication token, and adds query parameters. 7. `res.raise_for_status()` checks if the response from the server indicates an error. If an error is detected, it raises an exception. - You may have the HTTP error 415 if the file is not a valid ZIP of 7z file. + You may have the HTTP error 415 if the file is not a valid ZIP or 7z file. -8. `study_uuid = res.json()` parses the response from the server, assuming it is in JSON format, - and assigns it to the variable `study_uuid`. +8. `study_id = res.json()` parses the response from the server, assuming it is in JSON format, + and assigns it to the variable `study_id`. See also: - ["User account & api tokens"](../user-guide/1-interface.md#user-account-and-api-tokens) in the user guide. + +## See also + +- [How to Create a New Study?](studies-create.md) -- Create a new study in Antares Web +- [How to Upgrade a Study?](studies-upgrade.md) -- Upgrade a study to a recent version +- How to Run a study simulation? - Run a simulation on a study diff --git a/docs/how-to/studies-upgrade.md b/docs/how-to/studies-upgrade.md index fe48723539..f92a1855ff 100644 --- a/docs/how-to/studies-upgrade.md +++ b/docs/how-to/studies-upgrade.md @@ -2,10 +2,15 @@ title: How to Upgrade a Study? author: Laurent LAPORTE date: 2023-03-10 +revision: 2024-07-03 tags: -- upgrade -- version + - tutorial + - guide + - upgrade + - version + - study management + - raw --- @@ -31,6 +36,10 @@ hesitate to contact our support team for assistance. ## Upgrading +> **WARNING:** Upgrading a study is only possible if the study is a **raw study** without any variants, +> as it is not possible to update its descendants. Also, upgrading a variant study is not possible. +> If you have a variant study, you must first create a new raw study and then upgrade it. 
+ To upgrade your study to the latest version of Antares Web and Antares Simulator, you can follow these steps: On the main page of the study, you can find the version number at the top of the menu bar: @@ -58,5 +67,6 @@ Once the upgrade is complete, you can open your study and perform the manual upg ## See also -- Create a new study in the latest version -- Run a study in the latest version +- [How to Create a New Study?](studies-create.md) -- Create a new study in Antares Web +- [How to Import a Compressed Study?](studies-import.md) - Import a study from a compressed file +- How to Run a study simulation? - Run a simulation on a study diff --git a/docs/index.md b/docs/index.md index de7365f71b..4e1a2f818f 100644 --- a/docs/index.md +++ b/docs/index.md @@ -4,29 +4,32 @@ ![TypeScript](https://img.shields.io/badge/TypeScript-00599c?style=for-the-badge&logo=TypeScript&logoColor=61DAFB) ![React](https://img.shields.io/badge/React-00599c?style=for-the-badge&logo=react&logoColor=61DAFB) +![](assets/antares.png "Antares Web Logo") +> Web API and UI for [Antares Simulator][antares-simulator-website] -![antares logo](assets/antares.png) -> Web API and UI for [Antares Simulator][antareswebsite] - -This package works along with RTE's adequacy software [Antares Simulator][antareswebsite] that is also [hosted on github][antares-github] - -Please see the [Antares Web Documentation][readthedocs] for an introductory tutorial, -and a full user guide. Visit the [Antares-Simulator Documentation][readthedocs-antares] for more insights on ANTARES. +Please see the [Antares Web Documentation][antares-web-readthedocs] for an introductory tutorial, +and a full user guide. Visit the [Antares-Simulator Documentation][antares-simulator-readthedocs] for more insights on +ANTARES. ## Introduction -`antares-web` is a server api interfacing Antares Simulator solver and studies management. It provides a web application to manage studies -adding more features to simple edition. 
+Welcome to `antares-web`, a comprehensive web application designed to interface with RTE’s adequacy software, +the [Antares Simulator][antares-simulator-website], also [hosted on GitHub][antares-simulator-github]. +The Antares Simulator is an open-source power system simulator for anyone valuing the quantification of adequacy or the +economic performance of interconnected energy systems over short or distant time horizons. +It enables detailed modeling of energy consumption, generation, and transportation, performing probabilistic simulations +across numerous year-long scenarios, each consisting of 8760 hourly time-frames. + +`antares-web` serves as a server API interfacing with Antares Simulator studies, providing a web application to manage +studies while adding features for enhanced edition capabilities. -This brings: +This integration brings: -> - **application interoperability** : assign unique id to studies, expose operation endpoint api -> -> - **optimized storage**: extract matrices data and share them between studies, archive mode -> -> - **variant management**: add a new editing description language and generation tool -> -> - **user accounts** : add user management and permission system +- **Application Interoperability**: Assign unique IDs to studies and expose operations through an endpoint API, + facilitating integration with other applications and services. +- **Optimized Storage**: Extract matrices data and share them between studies, supporting archive mode. +- **Variant Management**: Introduce a new editing description language and generation tool. +- **User Accounts**: Implement user management and permission systems. ## Documentation @@ -34,18 +37,24 @@ This brings: - [Using the application](./user-guide/0-introduction.md) - [Contributing to the application code](./architecture/0-introduction.md) - `Antares-Web` is currently under development. Feel free to submit any issue. 
+[ci_result]: https://github.com/AntaresSimulatorTeam/AntaREST/actions/workflows/main.yml/badge.svg + +[ci_result_link]: https://github.com/AntaresSimulatorTeam/AntaREST/actions/workflows/main.yml -[ci_result]: https://github.com/AntaresSimulatorTeam/AntaREST/workflows/main/badge.svg -[ci_result_link]: https://github.com/AntaresSimulatorTeam/AntaREST/actions?query=workflow%3Amain [coverage_result]: https://sonarcloud.io/api/project_badges/measure?project=AntaresSimulatorTeam_api-iso-antares&metric=coverage + [coverage_result_link]: https://sonarcloud.io/dashboard?id=AntaresSimulatorTeam_api-iso-antares + [license_badge]: https://img.shields.io/github/license/AntaresSimulatorTeam/AntaREST + [license_link]: https://www.apache.org/licenses/LICENSE-2.0 -[antares-github]: https://github.com/AntaresSimulatorTeam/Antares_Simulator -[readthedocs]: https://antares-web.readthedocs.io/ -[readthedocs-antares]: https://antares-simulator.readthedocs.io/ -[antareswebsite]: https://antares-simulator.org +[antares-web-readthedocs]: https://antares-web.readthedocs.io/ + +[antares-simulator-readthedocs]: https://antares-simulator.readthedocs.io/ + +[antares-simulator-website]: https://antares-simulator.org + +[antares-simulator-github]: https://github.com/AntaresSimulatorTeam/Antares_Simulator diff --git a/docs/user-guide/0-introduction.md b/docs/user-guide/0-introduction.md index 99fa5637ac..de2bd50eb7 100644 --- a/docs/user-guide/0-introduction.md +++ b/docs/user-guide/0-introduction.md @@ -1,26 +1,48 @@ +--- +title: Introduction to Antares Web +author: Antares Web Team +date: 2021-10-05 +category: User Guide +tags: + + - introduction + - variant + - solver + - manager + +--- + # Introduction -![](../assets/antares.png) +![](../assets/antares.png "Antares Web Logo") + +Welcome to `antares-web`, a comprehensive web application designed to interface with RTE’s adequacy software, +the [Antares Simulator][antares-simulator-website], also [hosted on GitHub][antares-simulator-github]. 
+The Antares Simulator is an open-source power system simulator for anyone valuing the quantification of adequacy or the +economic performance of interconnected energy systems over short or distant time horizons. +It enables detailed modeling of energy consumption, generation, and transportation, performing probabilistic simulations +across numerous year-long scenarios, each consisting of 8760 hourly time-frames. + +`antares-web` serves as a server API interfacing with Antares Simulator studies, providing a web application to manage +studies while adding features for enhanced edition capabilities. + +This integration brings: -This package works along with RTE's adequacy software [Antares Simulator](https://antares-simulator.org) -that is also [hosted on github][antares-github] +- **Application Interoperability**: Assign unique IDs to studies and expose operations through an endpoint API, + facilitating integration with other applications and services. +- **Optimized Storage**: Extract matrices data and share them between studies, supporting archive mode. +- **Variant Management**: Introduce a new editing description language and generation tool. +- **User Accounts**: Implement user management and permission systems. -`antares-web` is a server api interfacing Antares Simulator studies. It provides a web application to manage studies -adding more features to simple edition. +## Variant Manager -This brings: +`antares-web` introduces an edition event store that tracks changes, simplifying the creation of study "variants" and +allowing for explicit diff change comparisons between studies. 
-> - **application interoperability** : assign unique id to studies, expose operation endpoint api -> -> - **optimized storage**: extract matrices data and share them between studies, archive mode -> -> - **variant management**: add a new editing description language and generation tool -> -> - **user accounts** : add user management and permission system +Explore the suite of features `antares-web` offers to enhance the Antares Simulator, improving study management, +interoperability, and user collaboration. -## Variant manager -`antares-web` brings an edition event store that provides a way to edit a study while keeping track of changes. -It eases the creation of "variants" of a study and allow an explicit diff change between studies. +[antares-simulator-website]: https://antares-simulator.org -You can read more information in [using the variant manager here](./3-variant_manager.md) +[antares-simulator-github]: https://github.com/AntaresSimulatorTeam/Antares_Simulator diff --git a/docs/user-guide/1-interface.md b/docs/user-guide/1-interface.md index ca002c205d..38d1088a0c 100644 --- a/docs/user-guide/1-interface.md +++ b/docs/user-guide/1-interface.md @@ -1,3 +1,21 @@ +--- +title: User Interface +author: Antares Web Team +date: 2021-11-03 +category: User Guide +tags: + + - ui + - job + - api + - token + - variant + - matrix + - dataset + - batch + - launch +--- + # User interface ## What's new (2.5.0) @@ -6,39 +24,43 @@ - [Strict folder filtering](#strict-folder-filtering) - [Zipped output retrieval](#launch-dialog) +The application is split into 3 main menus: Studies, Jobs, and Data. +API documentation, external reference links, and user account details are also available. -The application is split in 3 main menus : Studies, Jobs and Data. -API documentation, external reference links and user account details is also available. - - - "Studies" is the main section and redirects to the study listing where we can browse studies and work -on them. 
- - "Jobs" is a monitoring section which display currently running or latest execution jobs - - "Data" is a section where we can manage matrix data that can be then used in the [variant manager](#variant-management) +- "Studies" is the main section and redirects to the study listing where we can browse studies and work on them. +- "Jobs" is a monitoring section that displays currently running or latest execution jobs. +- "Data" is a section where we can manage matrix data that can then be used in + the [variant manager](#variant-management). ![](../assets/media/img/userguide_mainmenu.png) ## Study listing The study listing view is the main view, which provides : + - the listing of existing studies - filters/sorting/tree view - creation/import tool Studies are linked to a "workspace" which refers to a storage disk. The workspace "default" (orange colored) is -the internal storage where "managed" studies live. These studies files aren't meant to be accessible directly (via disk mount for instance). -The other workspaces are studies that are found on mounted workspace and their unique ID can change if the studies are moved. +the internal storage where "managed" studies live. These studies files aren't meant to be accessible directly (via disk +mount for instance). +The other workspaces are studies that are found on mounted workspace and their unique ID can change if the studies are +moved. + +Copied studies are always copied within the managed workspace. These managed studies though not directly accessible +offers additional features: -Copied studies are always copied within the managed workspace. 
These managed studies though not directly accessible offers additional features: - a permanent ID - archiving - variant creation - faster operations - storage improvements - ![](../assets/media/img/userguide_studylisting.png) Some actions are available from this view: + - launching the study simulation - exporting the study - deleting the study @@ -49,13 +71,12 @@ Some actions are available from this view: When launching a study, a dialog will open with some choices. - ![](../assets/media/img/userguide_launch_dialog.png) - ### Launch batch mode -To launch multiple studies at once, we can click on the checkbox icon to enable selection mode. In this mode, we can click +To launch multiple studies at once, we can click on the checkbox icon to enable selection mode. In this mode, we can +click on study cards to select / unselect them. Then clicking on the launch button will open the launch dialog. @@ -63,17 +84,18 @@ the launch dialog. ### Strict folder filtering -The folder icon next to the breadcrumb path allow to filter (when activated) the studies to only the direct descendant of the selected folder. +The folder icon next to the breadcrumb path allow to filter (when activated) the studies to only the direct descendant +of the selected folder. ![](../assets/media/img/userguide_strict_folder_filter.png) - For more operation over a study, we can click on a study "explore" button and go to the dedicated study view. The url of dedicated study view can be bookmarked for sharing or quick access. ## Study view The study view is composed of 2 or 3 main menus depending on the managed status of the study. 
+ - ["Information"](#overview) view is an overview of the study - ["Detailed view"](#detailed-view) is a raw view of the study contents - ["Variant"](#variant-management) view is where we can manage the variant of a study if it is managed @@ -81,6 +103,7 @@ The study view is composed of 2 or 3 main menus depending on the managed status ### Overview The overview provides access to : + - basic metadata - name and permission edition (a study can be public or associated with groups with specific permissions) - simulation execution monitoring @@ -92,6 +115,7 @@ The overview provides access to : The variant command tab is only available for managed variant studies. It shows an edition view where we can: + - edit the command list composing the variant - monitor or verify the result of the generation process @@ -99,11 +123,10 @@ It shows an edition view where we can: ### Detailed view -The detailed view is a tree representation of a study files. +The detailed view is a tree representation of a study's files. It can be browsed and nodes can be viewed and edited. :warning: The view can take some time to load the first time. - Example of the detailed view of a configuration node (ini files): ![](../assets/media/img/userguide_treeview_json.png) @@ -112,11 +135,10 @@ Example of the detailed view of a matrix node (txt data files): ![](../assets/media/img/userguide_treeview_matrix.png) - ## Data management The data view display dataset which are list of matrices. -These matrices can then be used as argument in [variant manager commands](./3-variant_manager.md#base-commands). +These matrices can then be used as argument in [variant manager commands](./3-variant_manager.md#command-list). ![](../assets/media/img/userguide_dataset_listing.png) @@ -131,12 +153,15 @@ These token can be used in scripts that will use the [API](#api-documentation). 
![](../assets/media/img/userguide_token_listing.png) -We can choose to assign specific permission to the token and can choose if the scripts using the token will impersonate our user or not. +We can choose to assign specific permission to the token and can choose if the scripts using the token will impersonate +our user or not. If we choose the later, studies created using the token will be owned by a new user that will have the token's name. ![](../assets/media/img/userguide_token_creation.png) -We have to save the token (as it is generated once and not saved). It will then be used as an authentication token in HTTP Basic Auth, eg.: +We have to save the token (as it is generated once and not saved). It will then be used as an authentication token in +HTTP Basic Auth, eg.: + ``` curl -H 'Authorization: Bearer ' https://antares-web/api/studies ``` diff --git a/docs/user-guide/2-study.md b/docs/user-guide/2-study.md index 12713241ef..2db09dbbf4 100644 --- a/docs/user-guide/2-study.md +++ b/docs/user-guide/2-study.md @@ -1,3 +1,20 @@ +--- +title: Study Configuration +author: Antares Web Team +date: 2023-12-21 +category: User Guide +tags: + + - configuration + - map + - network + - areas + - links + - binding-constraints + - debug + - table-mode +--- + # Study Configuration This page is dedicated to configuring the study in the Antares Web application. 
diff --git a/docs/user-guide/3-variant_manager.md b/docs/user-guide/3-variant_manager.md index 6762ee4c86..8b8ceb4ed7 100644 --- a/docs/user-guide/3-variant_manager.md +++ b/docs/user-guide/3-variant_manager.md @@ -1,3 +1,17 @@ +--- +title: Variant Manager +author: Antares Web Team +date: 2021-10-29 +category: User Guide +tags: + + - variant + - manager + - configuration + - command + - API +--- + # Variant Manager ## Introduction @@ -222,9 +236,15 @@ Create a new binding constraint "enabled?": " (default: True)", "time_step": "'hourly' | 'weekly' | 'daily'", "operator": "'equal' | 'both' | 'greater' | 'less'", + "comments?": "", + "group?": "", + "filter_year_by_year?": "", + "filter_synthesis?": "", "coeffs": "", "values?": "", - "comments?": "" + "less_term_matrix?": "", + "greater_term_matrix?": "", + "equal_term_matrix?": "" } ``` @@ -250,6 +270,20 @@ Or link `CONSTRAINT_COEFF` is: } ``` +> **Available Since v8.3:** +> +> The `filter_year_by_year` and `filter_synthesis` fields are only available for studies since v8.3. +> Those fields are used for the Geographic Trimming. +> Possible values are one or several of the following: "hourly", "daily", "weekly", "monthly", "annual". + +> **Available Since v8.7:** +> +> The `group` field and the `less_term_matrix`, `greater_term_matrix`, `equal_term_matrix` time series fields +> are available since v8.7. +> The `group` field is used to group constraints in the Monte Carlo Scenario Builder (default is `default`). +> The time series matrices are used to define the Right-Hand Side (RHS) of the binding constraint. +> Note that the `values` field must not be used when using the time series matrices. 
+ ### `update_binding_constraint` Update an existing binding constraint @@ -260,12 +294,20 @@ Update an existing binding constraint "enabled?": " (default: True)", "time_step": "'hourly' | 'weekly' | 'daily'", "operator": "'equal' | 'both' | 'greater' | 'less'", + "comments?": "", + "group?": "", + "filter_year_by_year?": "", + "filter_synthesis?": "", "coeffs": "", "values?": "", - "comments?": "" + "less_term_matrix?": "", + "greater_term_matrix?": "", + "equal_term_matrix?": "" } ``` +See [create_binding_constraint](#create_binding_constraint) for the details of the fields. + ### `remove_binding_constraint` Remove an existing binding constraint @@ -325,6 +367,8 @@ Replace arbitrary data file (must not be a matrix or ini target) with a base64 e ### `create_st_storage` +> **Available Since v8.6** + Create a new short-term storage ```json @@ -341,6 +385,8 @@ Create a new short-term storage ### `remove_st_storage` +> **Available Since v8.6** + Remove an existing short-term storage ```json @@ -356,7 +402,7 @@ Coming soon ### Composite commands -Comming soon +Coming soon ## CLI Tool diff --git a/examples/studies/STA-mini.zip b/examples/studies/STA-mini.zip index 1df2db61f8..0a4cd17045 100644 Binary files a/examples/studies/STA-mini.zip and b/examples/studies/STA-mini.zip differ diff --git a/mkdocs.yml b/mkdocs.yml index 6a0c971cb6..5a28ea96df 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -58,6 +58,7 @@ nav: - 'Main Topics': 'user-guide/study/06-table-mode.md' - 'Variant manager': 'user-guide/3-variant_manager.md' - 'How to': + - 'Create a study': 'how-to/studies-create.md' - 'Import a study': 'how-to/studies-import.md' - 'Upgrade a study': 'how-to/studies-upgrade.md' - 'Build': diff --git a/setup.py b/setup.py index 002035a113..7e6da75281 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name="AntaREST", - version="2.17.2", + version="2.17.3", description="Antares Server", long_description=Path("README.md").read_text(encoding="utf-8"), 
long_description_content_type="text/markdown", diff --git a/sonar-project.properties b/sonar-project.properties index 4513c749b0..e7a18f5010 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -6,5 +6,5 @@ sonar.exclusions=antarest/gui.py,antarest/main.py sonar.python.coverage.reportPaths=coverage.xml sonar.python.version=3.8 sonar.javascript.lcov.reportPaths=webapp/coverage/lcov.info -sonar.projectVersion=2.17.2 +sonar.projectVersion=2.17.3 sonar.coverage.exclusions=antarest/gui.py,antarest/main.py,antarest/singleton_services.py,antarest/worker/archive_worker_service.py,webapp/**/* \ No newline at end of file diff --git a/tests/core/test_exceptions.py b/tests/core/test_exceptions.py new file mode 100644 index 0000000000..86892187a0 --- /dev/null +++ b/tests/core/test_exceptions.py @@ -0,0 +1,25 @@ +from antarest.core.exceptions import ReferencedObjectDeletionNotAllowed + + +class TestReferencedObjectDeletionNotAllowed: + def test_few_binding_constraints(self) -> None: + object_id = "france" + binding_ids = ["bc1", "bc2"] + object_type = "Area" + exception = ReferencedObjectDeletionNotAllowed(object_id, binding_ids, object_type=object_type) + message = str(exception) + assert f"{object_type} '{object_id}'" in message + assert "bc1" in message + assert "bc2" in message + assert "more..." not in message + + def test_many_binding_constraints(self) -> None: + object_id = "france" + binding_ids = [f"bc{i}" for i in range(1, 50)] + object_type = "Area" + exception = ReferencedObjectDeletionNotAllowed(object_id, binding_ids, object_type=object_type) + message = str(exception) + assert f"{object_type} '{object_id}'" in message + assert "bc1" in message + assert "bc2" in message + assert "more..." 
in message diff --git a/tests/helpers.py b/tests/helpers.py index 0736eafd59..d9652b6585 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -1,3 +1,4 @@ +import math import time import uuid from datetime import datetime, timedelta, timezone @@ -66,6 +67,8 @@ def assert_study(a: SUB_JSON, b: SUB_JSON) -> None: _assert_list(cast(List[float], a.tolist()), b) elif isinstance(a, list) and isinstance(b, np.ndarray): _assert_list(a, cast(List[float], b.tolist())) + elif isinstance(a, float) and math.isnan(a): + assert math.isnan(b) else: _assert_others(a, b) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 0ac1e1763f..64d00e745d 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -120,12 +120,12 @@ def user_access_token_fixture( return t.cast(str, credentials["access_token"]) -@pytest.fixture(name="study_id") -def study_id_fixture( +@pytest.fixture(name="internal_study_id") +def internal_study_fixture( client: TestClient, user_access_token: str, ) -> str: - """Get the ID of the study stored in database""" + """Get the ID of the internal study which is scanned by the watcher""" res = client.get( "/v1/studies", headers={"Authorization": f"Bearer {user_access_token}"}, diff --git a/tests/integration/raw_studies_blueprint/test_aggregate_raw_data.py b/tests/integration/raw_studies_blueprint/test_aggregate_raw_data.py index 70cad9cd28..1dd53fdfe2 100644 --- a/tests/integration/raw_studies_blueprint/test_aggregate_raw_data.py +++ b/tests/integration/raw_studies_blueprint/test_aggregate_raw_data.py @@ -193,7 +193,7 @@ def test_area_aggregation( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ): """ Test the aggregation of areas data @@ -202,7 +202,7 @@ def test_area_aggregation( for params, expected_result_filename in AREAS_REQUESTS: output_id = params.pop("output_id") - res = client.get(f"/v1/studies/{study_id}/areas/aggregate/{output_id}", params=params) + res = 
client.get(f"/v1/studies/{internal_study_id}/areas/aggregate/{output_id}", params=params) assert res.status_code == 200, res.json() content = io.BytesIO(res.content) df = pd.read_csv(content, index_col=0, sep=",") @@ -222,7 +222,7 @@ def test_links_aggregation( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ): """ Test the aggregation of links data @@ -231,7 +231,7 @@ def test_links_aggregation( for params, expected_result_filename in LINKS_REQUESTS: output_id = params.pop("output_id") - res = client.get(f"/v1/studies/{study_id}/links/aggregate/{output_id}", params=params) + res = client.get(f"/v1/studies/{internal_study_id}/links/aggregate/{output_id}", params=params) assert res.status_code == 200, res.json() content = io.BytesIO(res.content) df = pd.read_csv(content, index_col=0, sep=",") @@ -251,7 +251,7 @@ def test_different_formats( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ): """ Tests that all formats work and produce the same result @@ -260,7 +260,7 @@ def test_different_formats( for params, expected_result_filename in SAME_REQUEST_DIFFERENT_FORMATS: output_id = params.pop("output_id") - res = client.get(f"/v1/studies/{study_id}/links/aggregate/{output_id}", params=params) + res = client.get(f"/v1/studies/{internal_study_id}/links/aggregate/{output_id}", params=params) assert res.status_code == 200, res.json() content = io.BytesIO(res.content) export_format = params["format"] @@ -282,31 +282,33 @@ def test_different_formats( expected_df[col] = expected_df[col].astype(df[col].dtype) pd.testing.assert_frame_equal(df, expected_df) - def test_aggregation_with_incoherent_bodies(self, client: TestClient, user_access_token: str, study_id: str): + def test_aggregation_with_incoherent_bodies( + self, client: TestClient, user_access_token: str, internal_study_id: str + ): """ Asserts that requests with incoherent bodies don't crash but send empty dataframes """ 
client.headers = {"Authorization": f"Bearer {user_access_token}"} for params in INCOHERENT_REQUESTS_BODIES: output_id = params.pop("output_id") - res = client.get(f"/v1/studies/{study_id}/links/aggregate/{output_id}", params=params) + res = client.get(f"/v1/studies/{internal_study_id}/links/aggregate/{output_id}", params=params) assert res.status_code == 200, res.json() content = io.BytesIO(res.content) df = pd.read_csv(content, index_col=0, sep=",") assert df.empty - def test_wrongly_typed_request(self, client: TestClient, user_access_token: str, study_id: str): + def test_wrongly_typed_request(self, client: TestClient, user_access_token: str, internal_study_id: str): """ Asserts that wrongly typed requests send an HTTP 422 Exception """ client.headers = {"Authorization": f"Bearer {user_access_token}"} for params in WRONGLY_TYPED_REQUESTS: output_id = params.pop("output_id") - res = client.get(f"/v1/studies/{study_id}/links/aggregate/{output_id}", params=params) + res = client.get(f"/v1/studies/{internal_study_id}/links/aggregate/{output_id}", params=params) assert res.status_code == 422 assert res.json()["exception"] == "RequestValidationError" - def test_aggregation_with_wrong_output(self, client: TestClient, user_access_token: str, study_id: str): + def test_aggregation_with_wrong_output(self, client: TestClient, user_access_token: str, internal_study_id: str): """ Asserts that requests with wrong output send an HTTP 422 Exception """ @@ -314,7 +316,7 @@ def test_aggregation_with_wrong_output(self, client: TestClient, user_access_tok # test for areas res = client.get( - f"/v1/studies/{study_id}/areas/aggregate/unknown_id", + f"/v1/studies/{internal_study_id}/areas/aggregate/unknown_id", params={ "query_file": AreasQueryFile.VALUES, "frequency": MatrixFrequency.HOURLY, @@ -326,7 +328,7 @@ def test_aggregation_with_wrong_output(self, client: TestClient, user_access_tok # test for links res = client.get( - f"/v1/studies/{study_id}/links/aggregate/unknown_id", + 
f"/v1/studies/{internal_study_id}/links/aggregate/unknown_id", params={ "query_file": LinksQueryFile.VALUES, "frequency": MatrixFrequency.HOURLY, diff --git a/tests/integration/raw_studies_blueprint/test_download_matrices.py b/tests/integration/raw_studies_blueprint/test_download_matrices.py index d2430c5bf0..0f4e764089 100644 --- a/tests/integration/raw_studies_blueprint/test_download_matrices.py +++ b/tests/integration/raw_studies_blueprint/test_download_matrices.py @@ -45,13 +45,13 @@ def copy_upgrade_study(self, ref_study_id, target_version=820): assert task.status == TaskStatus.COMPLETED return study_820_id - def upload_matrix(self, study_id: str, matrix_path: str, df: pd.DataFrame) -> None: + def upload_matrix(self, internal_study_id: str, matrix_path: str, df: pd.DataFrame) -> None: tsv = io.BytesIO() df.to_csv(tsv, sep="\t", index=False, header=False) tsv.seek(0) # noinspection SpellCheckingInspection res = self.client.put( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": matrix_path, "create_missing": True}, headers=self.headers, files={"file": tsv, "create_missing": "true"}, @@ -92,9 +92,9 @@ def create_area(self, parent_id, *, name: str, country: str = "FR") -> str: area_id = res.json()["id"] return area_id - def update_general_data(self, study_id: str, **data: t.Any): + def update_general_data(self, internal_study_id: str, **data: t.Any): res = self.client.put( - f"/v1/studies/{study_id}/config/general/form", + f"/v1/studies/{internal_study_id}/config/general/form", json=data, headers=self.headers, ) @@ -107,7 +107,7 @@ class TestDownloadMatrices: Checks the retrieval of matrices with the endpoint GET studies/uuid/raw/download """ - def test_download_matrices(self, client: TestClient, user_access_token: str, study_id: str) -> None: + def test_download_matrices(self, client: TestClient, user_access_token: str, internal_study_id: str) -> None: user_headers = {"Authorization": f"Bearer {user_access_token}"} # 
===================== @@ -116,7 +116,7 @@ def test_download_matrices(self, client: TestClient, user_access_token: str, stu preparer = PreparerProxy(client, user_access_token) - study_820_id = preparer.copy_upgrade_study(study_id, target_version=820) + study_820_id = preparer.copy_upgrade_study(internal_study_id, target_version=820) # Create Variant variant_id = preparer.create_variant(study_820_id, name="New Variant") @@ -131,7 +131,7 @@ def test_download_matrices(self, client: TestClient, user_access_token: str, stu preparer.generate_snapshot(variant_id) # Prepare a managed study to test specific matrices for version 8.6 - study_860_id = preparer.copy_upgrade_study(study_id, target_version=860) + study_860_id = preparer.copy_upgrade_study(internal_study_id, target_version=860) # Import a Min Gen. matrix: shape=(8760, 3), with random integers between 0 and 1000 generator = np.random.default_rng(11) @@ -226,7 +226,7 @@ def test_download_matrices(self, client: TestClient, user_access_token: str, stu # tests links headers before v8.2 res = client.get( - f"/v1/studies/{study_id}/raw/download", + f"/v1/studies/{internal_study_id}/raw/download", params={"path": "input/links/de/fr", "format": "tsv", "index": False}, headers=user_headers, ) @@ -275,7 +275,7 @@ def test_download_matrices(self, client: TestClient, user_access_token: str, stu # test for empty matrix res = client.get( - f"/v1/studies/{study_id}/raw/download", + f"/v1/studies/{internal_study_id}/raw/download", params={"path": "input/hydro/common/capacity/waterValues_de", "format": "tsv"}, headers=user_headers, ) @@ -305,7 +305,7 @@ def test_download_matrices(self, client: TestClient, user_access_token: str, stu # asserts endpoint returns the right columns for output matrix res = client.get( - f"/v1/studies/{study_id}/raw/download", + f"/v1/studies/{internal_study_id}/raw/download", params={ "path": "output/20201014-1422eco-hello/economy/mc-ind/00001/links/de/fr/values-hourly", "format": "tsv", @@ -331,7 +331,7 
@@ def test_download_matrices(self, client: TestClient, user_access_token: str, stu # test energy matrix to test the regex res = client.get( - f"/v1/studies/{study_id}/raw/download", + f"/v1/studies/{internal_study_id}/raw/download", params={"path": "input/hydro/prepro/de/energy", "format": "tsv"}, headers=user_headers, ) diff --git a/tests/integration/raw_studies_blueprint/test_fetch_raw_data.py b/tests/integration/raw_studies_blueprint/test_fetch_raw_data.py index 8229fa543a..e55929c97a 100644 --- a/tests/integration/raw_studies_blueprint/test_fetch_raw_data.py +++ b/tests/integration/raw_studies_blueprint/test_fetch_raw_data.py @@ -25,7 +25,7 @@ def test_get_study( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ): """ Test the `get_study` endpoint for fetching raw data from a study. @@ -42,7 +42,7 @@ def test_get_study( """ # First copy the user resources in the Study directory with db(): - study: RawStudy = db.session.get(Study, study_id) + study: RawStudy = db.session.get(Study, internal_study_id) study_dir = pathlib.Path(study.path) headers = {"Authorization": f"Bearer {user_access_token}"} @@ -57,7 +57,7 @@ def test_get_study( for file_path in user_folder_dir.glob("*.*"): rel_path = file_path.relative_to(study_dir).as_posix() res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": rel_path, "depth": 1}, headers=headers, ) @@ -83,7 +83,7 @@ def test_get_study( for file_path in user_folder_dir.glob("*.*"): rel_path = file_path.relative_to(study_dir) res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": f"/{rel_path.as_posix()}", "depth": 1}, headers=headers, ) @@ -94,7 +94,7 @@ def test_get_study( # If you try to retrieve a file that doesn't exist, we should have a 404 error res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": 
"user/somewhere/something.txt"}, headers=headers, ) @@ -107,7 +107,7 @@ def test_get_study( # If you want to update an existing resource, you can use PUT method. # But, if the resource doesn't exist, you should have a 404 Not Found error. res = client.put( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": "user/somewhere/something.txt"}, headers=headers, files={"file": io.BytesIO(b"Goodbye World!")}, @@ -121,7 +121,7 @@ def test_get_study( # To create a resource, you can use PUT method and the `create_missing` flag. # The expected status code should be 204 No Content. res = client.put( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": "user/somewhere/something.txt", "create_missing": True}, headers=headers, files={"file": io.BytesIO(b"Goodbye Cruel World!")}, @@ -131,7 +131,7 @@ def test_get_study( # To update a resource, you can use PUT method, with or without the `create_missing` flag. # The expected status code should be 204 No Content. res = client.put( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": "user/somewhere/something.txt", "create_missing": True}, headers=headers, files={"file": io.BytesIO(b"This is the end!")}, @@ -140,7 +140,7 @@ def test_get_study( # You can check that the resource has been created or updated. 
res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": "user/somewhere/something.txt"}, headers=headers, ) @@ -150,7 +150,7 @@ def test_get_study( # If we ask for properties, we should have a JSON content rel_path = "/input/links/de/properties/fr" res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": rel_path, "depth": 2}, headers=headers, ) @@ -175,7 +175,7 @@ def test_get_study( # If we ask for a matrix, we should have a JSON content if formatted is True rel_path = "/input/links/de/fr" res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": rel_path, "formatted": True}, headers=headers, ) @@ -186,7 +186,7 @@ def test_get_study( # If we ask for a matrix, we should have a CSV content if formatted is False rel_path = "/input/links/de/fr" res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": rel_path, "formatted": False}, headers=headers, ) @@ -198,7 +198,7 @@ def test_get_study( # If ask for an empty matrix, we should have an empty binary content res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": "input/thermal/prepro/de/01_solar/data", "formatted": False}, headers=headers, ) @@ -207,7 +207,7 @@ def test_get_study( # But, if we use formatted = True, we should have a JSON objet representing and empty matrix res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": "input/thermal/prepro/de/01_solar/data", "formatted": True}, headers=headers, ) @@ -219,7 +219,7 @@ def test_get_study( for file_path in user_folder_dir.glob("*.*"): rel_path = file_path.relative_to(study_dir) res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": f"/{rel_path.as_posix()}", "depth": 1}, headers=headers, ) @@ -228,7 
+228,7 @@ def test_get_study( # We can access to the configuration the classic way, # for instance, we can get the list of areas: res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": "/input/areas/list", "depth": 1}, headers=headers, ) @@ -237,7 +237,7 @@ def test_get_study( # asserts that the GET /raw endpoint is able to read matrix containing NaN values res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": "output/20201014-1427eco/economy/mc-all/areas/de/id-monthly"}, headers=headers, ) @@ -247,7 +247,7 @@ def test_get_study( # Iterate over all possible combinations of path and depth for path, depth in itertools.product([None, "", "/"], [0, 1, 2]): res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": path, "depth": depth}, headers=headers, ) diff --git a/tests/integration/studies_blueprint/test_comments.py b/tests/integration/studies_blueprint/test_comments.py index 378be0aed5..39ce84e35b 100644 --- a/tests/integration/studies_blueprint/test_comments.py +++ b/tests/integration/studies_blueprint/test_comments.py @@ -20,7 +20,7 @@ def test_raw_study( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ) -> None: """ This test verifies that we can retrieve and modify the comments of a study. 
@@ -29,7 +29,7 @@ def test_raw_study( # Get the comments of the study and compare with the expected file res = client.get( - f"/v1/studies/{study_id}/comments", + f"/v1/studies/{internal_study_id}/comments", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -41,7 +41,7 @@ def test_raw_study( # Ensure the duration is relatively short start = time.time() res = client.get( - f"/v1/studies/{study_id}/comments", + f"/v1/studies/{internal_study_id}/comments", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -50,7 +50,7 @@ def test_raw_study( # Update the comments of the study res = client.put( - f"/v1/studies/{study_id}/comments", + f"/v1/studies/{internal_study_id}/comments", headers={"Authorization": f"Bearer {user_access_token}"}, json={"comments": "Ceci est un commentaire en franΓ§ais."}, ) @@ -58,7 +58,7 @@ def test_raw_study( # Get the comments of the study and compare with the expected file res = client.get( - f"/v1/studies/{study_id}/comments", + f"/v1/studies/{internal_study_id}/comments", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -68,7 +68,7 @@ def test_variant_study( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ) -> None: """ This test verifies that we can retrieve and modify the comments of a VARIANT study. @@ -76,7 +76,7 @@ def test_variant_study( """ # First, we create a copy of the study, and we convert it to a managed study. 
res = client.post( - f"/v1/studies/{study_id}/copy", + f"/v1/studies/{internal_study_id}/copy", headers={"Authorization": f"Bearer {user_access_token}"}, params={"dest": "default", "with_outputs": False, "use_task": False}, # type: ignore ) diff --git a/tests/integration/studies_blueprint/test_disk_usage.py b/tests/integration/studies_blueprint/test_disk_usage.py index 1685acad87..dcfb48a5b7 100644 --- a/tests/integration/studies_blueprint/test_disk_usage.py +++ b/tests/integration/studies_blueprint/test_disk_usage.py @@ -10,7 +10,7 @@ def test_disk_usage_endpoint( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, tmp_path: Path, ) -> None: """ @@ -23,7 +23,7 @@ def test_disk_usage_endpoint( user_headers = {"Authorization": f"Bearer {user_access_token}"} res = client.get( - f"/v1/studies/{study_id}/disk-usage", + f"/v1/studies/{internal_study_id}/disk-usage", headers=user_headers, ) assert res.status_code == 200, res.json() @@ -32,7 +32,7 @@ def test_disk_usage_endpoint( # Copy the study in managed workspace in order to create a variant res = client.post( - f"/v1/studies/{study_id}/copy", + f"/v1/studies/{internal_study_id}/copy", headers=user_headers, params={"dest": "somewhere", "use_task": "false"}, ) diff --git a/tests/integration/studies_blueprint/test_study_matrix_index.py b/tests/integration/studies_blueprint/test_study_matrix_index.py index 4aeacceff4..7821ee6151 100644 --- a/tests/integration/studies_blueprint/test_study_matrix_index.py +++ b/tests/integration/studies_blueprint/test_study_matrix_index.py @@ -14,7 +14,7 @@ def test_get_study_matrix_index( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ) -> None: user_access_token = {"Authorization": f"Bearer {user_access_token}"} @@ -23,7 +23,7 @@ def test_get_study_matrix_index( # Check the Common matrix index res = client.get( - f"/v1/studies/{study_id}/matrixindex", + f"/v1/studies/{internal_study_id}/matrixindex", 
headers=user_access_token, params={"path": "input/thermal/prepro/fr/01_solar/modulation"}, ) @@ -40,7 +40,7 @@ def test_get_study_matrix_index( # Check the TS Generator matrix index res = client.get( - f"/v1/studies/{study_id}/matrixindex", + f"/v1/studies/{internal_study_id}/matrixindex", headers=user_access_token, params={"path": "input/thermal/prepro/fr/01_solar/data"}, ) @@ -57,7 +57,7 @@ def test_get_study_matrix_index( # Check the time series res = client.get( - f"/v1/studies/{study_id}/matrixindex", + f"/v1/studies/{internal_study_id}/matrixindex", headers=user_access_token, params={"path": "input/thermal/series/fr/01_solar/series"}, ) @@ -75,7 +75,7 @@ def test_get_study_matrix_index( # Check the default matrix index # ============================== - res = client.get(f"/v1/studies/{study_id}/matrixindex", headers=user_access_token) + res = client.get(f"/v1/studies/{internal_study_id}/matrixindex", headers=user_access_token) assert res.status_code == 200 actual = res.json() expected = { @@ -90,7 +90,7 @@ def test_get_study_matrix_index( # ========================================================================= res = client.get( - f"/v1/studies/{study_id}/matrixindex", + f"/v1/studies/{internal_study_id}/matrixindex", headers=user_access_token, params={"path": "output/20201014-1427eco/economy/mc-all/areas/es/details-daily"}, ) diff --git a/tests/integration/studies_blueprint/test_synthesis.py b/tests/integration/studies_blueprint/test_synthesis.py index 1f10fa2989..2b5205824a 100644 --- a/tests/integration/studies_blueprint/test_synthesis.py +++ b/tests/integration/studies_blueprint/test_synthesis.py @@ -33,7 +33,7 @@ def test_raw_study( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ) -> None: """ This test verifies that we can retrieve the synthesis of a study. 
@@ -42,7 +42,7 @@ def test_raw_study( # Get the synthesis of the study and compare with the expected file res = client.get( - f"/v1/studies/{study_id}/synthesis", + f"/v1/studies/{internal_study_id}/synthesis", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -53,7 +53,7 @@ def test_raw_study( # Ensure the duration is relatively short start = time.time() res = client.get( - f"/v1/studies/{study_id}/synthesis", + f"/v1/studies/{internal_study_id}/synthesis", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -64,7 +64,7 @@ def test_variant_study( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ) -> None: """ This test verifies that we can retrieve and modify the synthesis of a VARIANT study. @@ -72,7 +72,7 @@ def test_variant_study( """ # First, we create a copy of the study, and we convert it to a managed study. res = client.post( - f"/v1/studies/{study_id}/copy", + f"/v1/studies/{internal_study_id}/copy", headers={"Authorization": f"Bearer {user_access_token}"}, params={"dest": "default", "with_outputs": False, "use_task": False}, # type: ignore ) diff --git a/tests/integration/studies_blueprint/test_update_tags.py b/tests/integration/studies_blueprint/test_update_tags.py index a65ece2f11..9ee37c7d70 100644 --- a/tests/integration/studies_blueprint/test_update_tags.py +++ b/tests/integration/studies_blueprint/test_update_tags.py @@ -10,7 +10,7 @@ def test_update_tags( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ) -> None: """ This test verifies that we can update the tags of a study. 
@@ -20,7 +20,7 @@ def test_update_tags( # Classic usage: set some tags to a study study_tags = ["Tag1", "Tag2"] res = client.put( - f"/v1/studies/{study_id}", + f"/v1/studies/{internal_study_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={"tags": study_tags}, ) @@ -33,7 +33,7 @@ def test_update_tags( # - "Tag2" is replaced by "Tag3". study_tags = ["tag1", "Tag3"] res = client.put( - f"/v1/studies/{study_id}", + f"/v1/studies/{internal_study_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={"tags": study_tags}, ) @@ -46,7 +46,7 @@ def test_update_tags( # consecutive whitespaces are replaced by a single one. study_tags = [" \xa0Foo \t Bar \n ", " \t Baz\xa0\xa0"] res = client.put( - f"/v1/studies/{study_id}", + f"/v1/studies/{internal_study_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={"tags": study_tags}, ) @@ -57,7 +57,7 @@ def test_update_tags( # We can have symbols in the tags study_tags = ["Foo-Bar", ":Baz%"] res = client.put( - f"/v1/studies/{study_id}", + f"/v1/studies/{internal_study_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={"tags": study_tags}, ) @@ -69,12 +69,12 @@ def test_update_tags__invalid_tags( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ) -> None: # We cannot have empty tags study_tags = [""] res = client.put( - f"/v1/studies/{study_id}", + f"/v1/studies/{internal_study_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={"tags": study_tags}, ) @@ -86,7 +86,7 @@ def test_update_tags__invalid_tags( study_tags = ["very long tags, very long tags, very long tags"] assert len(study_tags[0]) > 40 res = client.put( - f"/v1/studies/{study_id}", + f"/v1/studies/{internal_study_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={"tags": study_tags}, ) diff --git a/tests/integration/study_data_blueprint/test_advanced_parameters.py 
b/tests/integration/study_data_blueprint/test_advanced_parameters.py index b126349335..90eee2a88f 100644 --- a/tests/integration/study_data_blueprint/test_advanced_parameters.py +++ b/tests/integration/study_data_blueprint/test_advanced_parameters.py @@ -19,11 +19,11 @@ def test_get_advanced_parameters_values( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ): """Check `get_advanced_parameters_form_values` end point""" res = client.get( - f"/v1/studies/{study_id}/config/advancedparameters/form", + f"/v1/studies/{internal_study_id}/config/advancedparameters/form", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == HTTPStatus.OK, res.json() @@ -55,12 +55,12 @@ def test_get_advanced_parameters_values( @pytest.mark.parametrize("study_version", [0, 880]) def test_set_advanced_parameters_values( - self, client: TestClient, user_access_token: str, study_id: str, study_version: int + self, client: TestClient, user_access_token: str, internal_study_id: str, study_version: int ): """Check `set_advanced_parameters_values` end point""" obj = {"initialReservoirLevels": "hot start"} res = client.put( - f"/v1/studies/{study_id}/config/advancedparameters/form", + f"/v1/studies/{internal_study_id}/config/advancedparameters/form", headers={"Authorization": f"Bearer {user_access_token}"}, json=obj, ) @@ -70,7 +70,7 @@ def test_set_advanced_parameters_values( if study_version: res = client.put( - f"/v1/studies/{study_id}/upgrade", + f"/v1/studies/{internal_study_id}/upgrade", headers={"Authorization": f"Bearer {user_access_token}"}, params={"target_version": study_version}, ) @@ -82,7 +82,7 @@ def test_set_advanced_parameters_values( obj = {"unitCommitmentMode": "milp"} res = client.put( - f"/v1/studies/{study_id}/config/advancedparameters/form", + f"/v1/studies/{internal_study_id}/config/advancedparameters/form", headers={"Authorization": f"Bearer {user_access_token}"}, json=obj, ) diff --git 
a/tests/integration/study_data_blueprint/test_binding_constraints.py b/tests/integration/study_data_blueprint/test_binding_constraints.py index af537fcc7f..7e3c613e16 100644 --- a/tests/integration/study_data_blueprint/test_binding_constraints.py +++ b/tests/integration/study_data_blueprint/test_binding_constraints.py @@ -46,6 +46,7 @@ def test_constraint_id__link(self) -> None: offset=123, data=LinkTerm(area1="Area 1", area2="Area 2"), ) + assert term.data is not None assert term.generate_id() == term.data.generate_id() def test_constraint_id__cluster(self) -> None: @@ -55,6 +56,7 @@ def test_constraint_id__cluster(self) -> None: offset=123, data=ClusterTerm(area="Area 1", cluster="Cluster X"), ) + assert term.data is not None assert term.generate_id() == term.data.generate_id() def test_constraint_id__other(self) -> None: @@ -66,18 +68,11 @@ def test_constraint_id__other(self) -> None: assert term.generate_id() == "foo" -def _upload_matrix( - client: TestClient, user_access_token: str, study_id: str, matrix_path: str, df: pd.DataFrame -) -> None: +def _upload_matrix(client: TestClient, study_id: str, matrix_path: str, df: pd.DataFrame) -> None: tsv = io.BytesIO() df.to_csv(tsv, sep="\t", index=False, header=False) tsv.seek(0) - res = client.put( - f"/v1/studies/{study_id}/raw", - params={"path": matrix_path}, - headers={"Authorization": f"Bearer {user_access_token}"}, - files={"file": tsv}, - ) + res = client.put(f"/v1/studies/{study_id}/raw", params={"path": matrix_path}, files={"file": tsv}) res.raise_for_status() @@ -89,72 +84,43 @@ class TestBindingConstraints: @pytest.mark.parametrize("study_type", ["raw", "variant"]) def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, study_type: str) -> None: - user_headers = {"Authorization": f"Bearer {user_access_token}"} + client.headers = {"Authorization": f"Bearer {user_access_token}"} # type: ignore # ============================= # STUDY PREPARATION # ============================= # Create 
a Study - res = client.post( - "/v1/studies", - headers=user_headers, - params={"name": "foo", "version": "860"}, - ) + res = client.post("/v1/studies", params={"name": "foo", "version": "860"}) assert res.status_code == 201, res.json() study_id = res.json() # Create Areas - res = client.post( - f"/v1/studies/{study_id}/areas", - headers=user_headers, - json={ - "name": "Area 1", - "type": "AREA", - }, - ) + res = client.post(f"/v1/studies/{study_id}/areas", json={"name": "Area 1", "type": "AREA"}) assert res.status_code == 200, res.json() area1_id = res.json()["id"] assert area1_id == "area 1" - res = client.post( - f"/v1/studies/{study_id}/areas", - headers=user_headers, - json={ - "name": "Area 2", - "type": "AREA", - }, - ) + res = client.post(f"/v1/studies/{study_id}/areas", json={"name": "Area 2", "type": "AREA"}) assert res.status_code == 200, res.json() area2_id = res.json()["id"] assert area2_id == "area 2" # Create a link between the two areas - res = client.post( - f"/v1/studies/{study_id}/links", - headers=user_headers, - json={ - "area1": area1_id, - "area2": area2_id, - }, - ) + res = client.post(f"/v1/studies/{study_id}/links", json={"area1": area1_id, "area2": area2_id}) assert res.status_code == 200, res.json() # Create a cluster in area1 res = client.post( f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal", - headers=user_headers, - json={ - "name": "Cluster 1", - "group": "Nuclear", - }, + json={"name": "Cluster 1", "group": "Nuclear"}, ) assert res.status_code == 200, res.json() cluster_id = res.json()["id"] assert cluster_id == "Cluster 1" # Get clusters list to check created cluster in area1 - res = client.get(f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal", headers=user_headers) + res = client.get(f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal") clusters_list = res.json() assert res.status_code == 200, res.json() assert len(clusters_list) == 1 @@ -164,11 +130,7 @@ def test_lifecycle__nominal(self, client: 
TestClient, user_access_token: str, st if study_type == "variant": # Create Variant - res = client.post( - f"/v1/studies/{study_id}/variants", - headers=user_headers, - params={"name": "Variant 1"}, - ) + res = client.post(f"/v1/studies/{study_id}/variants", params={"name": "Variant 1"}) assert res.status_code in {200, 201}, res.json() study_id = res.json() @@ -192,7 +154,6 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st }, } ], - headers=user_headers, ) assert res.status_code in {200, 201}, res.json() @@ -211,7 +172,6 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st }, } ], - headers=user_headers, ) assert res.status_code in {200, 201}, res.json() @@ -226,12 +186,11 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st "terms": [], "comments": "New API", }, - headers=user_headers, ) assert res.status_code in {200, 201}, res.json() # Get Binding Constraint list - res = client.get(f"/v1/studies/{study_id}/bindingconstraints", headers=user_headers) + res = client.get(f"/v1/studies/{study_id}/bindingconstraints") binding_constraints_list = res.json() assert res.status_code == 200, res.json() assert len(binding_constraints_list) == 3 @@ -278,7 +237,7 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st bc_id = binding_constraints_list[0]["id"] # Asserts binding constraint configuration is valid. 
- res = client.get(f"/v1/studies/{study_id}/constraint-groups", headers=user_headers) + res = client.get(f"/v1/studies/{study_id}/constraint-groups") assert res.status_code == 200, res.json() # ============================= @@ -293,7 +252,6 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st "offset": 2, "data": {"area1": area1_id, "area2": area2_id}, }, - headers=user_headers, ) assert res.status_code == 200, res.json() @@ -303,21 +261,14 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st json={ "weight": 1, "offset": 2, - "data": { - "area": area1_id, - "cluster": cluster_id, - }, - # NOTE: cluster_id in term data can be uppercase, but it must be lowercase in the returned ini configuration file + "data": {"area": area1_id, "cluster": cluster_id}, + # NOTE: cluster_id in term data can be uppercase, but it must be lowercase in the INI file }, - headers=user_headers, ) assert res.status_code == 200, res.json() # Get binding constraints list to check added terms - res = client.get( - f"/v1/studies/{study_id}/bindingconstraints/{bc_id}", - headers=user_headers, - ) + res = client.get(f"/v1/studies/{study_id}/bindingconstraints/{bc_id}") assert res.status_code == 200, res.json() binding_constraint = res.json() constraint_terms = binding_constraint["terms"] @@ -340,19 +291,12 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st # Update constraint cluster term with uppercase cluster_id res = client.put( f"/v1/studies/{study_id}/bindingconstraints/{bc_id}/term", - json={ - "id": f"{area1_id}.{cluster_id}", - "weight": 3, - }, - headers=user_headers, + json={"id": f"{area1_id}.{cluster_id}", "weight": 3}, ) assert res.status_code == 200, res.json() # Check updated terms, cluster_id should be lowercase in the returned configuration - res = client.get( - f"/v1/studies/{study_id}/bindingconstraints/{bc_id}", - headers=user_headers, - ) + res = 
client.get(f"/v1/studies/{study_id}/bindingconstraints/{bc_id}") assert res.status_code == 200, res.json() binding_constraint = res.json() constraint_terms = binding_constraint["terms"] @@ -376,7 +320,6 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st res = client.put( f"/v1/studies/{study_id}/bindingconstraints/{bc_id}/term", json={"id": f"{area1_id}.!!invalid#cluster%%", "weight": 4}, - headers=user_headers, ) assert res.status_code == 404, res.json() exception = res.json()["exception"] @@ -388,11 +331,7 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st # Update constraint cluster term with empty data res = client.put( f"/v1/studies/{study_id}/bindingconstraints/{bc_id}/term", - json={ - "id": f"{area1_id}.{cluster_id}", - "data": {}, - }, - headers=user_headers, + json={"id": f"{area1_id}.{cluster_id}", "data": {}}, ) assert res.status_code == 422, res.json() assert res.json() == { @@ -402,17 +341,11 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st } # Remove Constraint term - res = client.delete( - f"/v1/studies/{study_id}/bindingconstraints/{bc_id}/term/{area1_id}%{area2_id}", - headers=user_headers, - ) + res = client.delete(f"/v1/studies/{study_id}/bindingconstraints/{bc_id}/term/{area1_id}%{area2_id}") assert res.status_code == 200, res.json() # Check updated terms, the deleted term should no longer exist. - res = client.get( - f"/v1/studies/{study_id}/bindingconstraints/{bc_id}", - headers=user_headers, - ) + res = client.get(f"/v1/studies/{study_id}/bindingconstraints/{bc_id}") assert res.status_code == 200, res.json() binding_constraint = res.json() constraint_terms = binding_constraint["terms"] @@ -427,17 +360,10 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st assert constraint_terms == expected # Update random field, shouldn't remove the term. 
- res = client.put( - f"v1/studies/{study_id}/bindingconstraints/{bc_id}", - json={"enabled": False}, - headers=user_headers, - ) + res = client.put(f"v1/studies/{study_id}/bindingconstraints/{bc_id}", json={"enabled": False}) assert res.status_code == 200, res.json() - res = client.get( - f"/v1/studies/{study_id}/bindingconstraints/{bc_id}", - headers=user_headers, - ) + res = client.get(f"/v1/studies/{study_id}/bindingconstraints/{bc_id}") assert res.status_code == 200, res.json() binding_constraint = res.json() constraint_terms = binding_constraint["terms"] @@ -449,27 +375,19 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st # Update element of Binding constraint new_comment = "We made it !" - res = client.put( - f"v1/studies/{study_id}/bindingconstraints/{bc_id}", - json={"comments": new_comment}, - headers=user_headers, - ) + res = client.put(f"v1/studies/{study_id}/bindingconstraints/{bc_id}", json={"comments": new_comment}) assert res.status_code == 200 assert res.json()["comments"] == new_comment # The user change the timeStep to daily instead of hourly. # We must check that the matrix is a daily/weekly matrix. 
- res = client.put( - f"/v1/studies/{study_id}/bindingconstraints/{bc_id}", - json={"timeStep": "daily"}, - headers=user_headers, - ) + res = client.put(f"/v1/studies/{study_id}/bindingconstraints/{bc_id}", json={"timeStep": "daily"}) assert res.status_code == 200, res.json() assert res.json()["timeStep"] == "daily" # Check that the command corresponds to a change in `time_step` if study_type == "variant": - res = client.get(f"/v1/studies/{study_id}/commands", headers=user_headers) + res = client.get(f"/v1/studies/{study_id}/commands") commands = res.json() args = commands[-1]["args"] assert args["time_step"] == "daily" @@ -479,7 +397,6 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st res = client.get( f"/v1/studies/{study_id}/raw", params={"path": f"input/bindingconstraints/{bc_id}", "depth": 1, "formatted": True}, # type: ignore - headers=user_headers, ) assert res.status_code == 200, res.json() dataframe = res.json() @@ -501,7 +418,6 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st "terms": [], "comments": "New API", }, - headers=user_headers, ) assert res.status_code == 400, res.json() assert res.json() == { @@ -520,7 +436,6 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st "terms": [], "comments": "New API", }, - headers=user_headers, ) assert res.status_code == 400, res.json() assert res.json() == { @@ -539,7 +454,6 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st "terms": [], "comments": "", }, - headers=user_headers, ) assert res.status_code == 409, res.json() @@ -556,7 +470,6 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st "values": [[]], "less_term_matrix": [[]], }, - headers=user_headers, ) assert res.status_code == 422, res.json() description = res.json()["description"] @@ -577,7 +490,6 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st "comments": 
"Incoherent matrix with version", "lessTermMatrix": [[]], }, - headers=user_headers, ) assert res.status_code == 422, res.json() description = res.json()["description"] @@ -594,11 +506,7 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st "comments": "Creation with matrix", "values": wrong_matrix.tolist(), } - res = client.post( - f"/v1/studies/{study_id}/bindingconstraints", - json=wrong_request_args, - headers=user_headers, - ) + res = client.post(f"/v1/studies/{study_id}/bindingconstraints", json=wrong_request_args) assert res.status_code == 422, res.json() exception = res.json()["exception"] description = res.json()["description"] @@ -607,7 +515,7 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st assert "(366, 3)" in description # Delete a fake binding constraint - res = client.delete(f"/v1/studies/{study_id}/bindingconstraints/fake_bc", headers=user_headers) + res = client.delete(f"/v1/studies/{study_id}/bindingconstraints/fake_bc") assert res.status_code == 404, res.json() assert res.json()["exception"] == "BindingConstraintNotFound" assert res.json()["description"] == "Binding constraint 'fake_bc' not found" @@ -617,7 +525,6 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st res = client.put( f"/v1/studies/{study_id}/bindingconstraints/binding_constraint_2", json={"group": grp_name}, - headers=user_headers, ) assert res.status_code == 422, res.json() assert res.json()["exception"] == "InvalidFieldForVersionError" @@ -630,82 +537,48 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st res = client.put( f"/v1/studies/{study_id}/bindingconstraints/binding_constraint_2", json={"less_term_matrix": [[]]}, - headers=user_headers, ) assert res.status_code == 422, res.json() assert res.json()["exception"] == "InvalidFieldForVersionError" assert res.json()["description"] == "You cannot fill a 'matrix_term' as these values refer to v8.7+ 
studies" @pytest.mark.parametrize("study_type", ["raw", "variant"]) - def test_for_version_870(self, client: TestClient, admin_access_token: str, study_type: str) -> None: - admin_headers = {"Authorization": f"Bearer {admin_access_token}"} + def test_for_version_870(self, client: TestClient, user_access_token: str, study_type: str) -> None: + client.headers = {"Authorization": f"Bearer {user_access_token}"} # type: ignore # ============================= # STUDY PREPARATION # ============================= - res = client.post( - "/v1/studies", - headers=admin_headers, - params={"name": "foo"}, - ) + res = client.post("/v1/studies", params={"name": "foo"}) assert res.status_code == 201, res.json() study_id = res.json() if study_type == "variant": # Create Variant - res = client.post( - f"/v1/studies/{study_id}/variants", - headers=admin_headers, - params={"name": "Variant 1"}, - ) + res = client.post(f"/v1/studies/{study_id}/variants", params={"name": "Variant 1"}) assert res.status_code in {200, 201} study_id = res.json() # Create Areas - res = client.post( - f"/v1/studies/{study_id}/areas", - headers=admin_headers, - json={ - "name": "Area 1", - "type": "AREA", - }, - ) + res = client.post(f"/v1/studies/{study_id}/areas", json={"name": "Area 1", "type": "AREA"}) assert res.status_code == 200, res.json() area1_id = res.json()["id"] assert area1_id == "area 1" - res = client.post( - f"/v1/studies/{study_id}/areas", - headers=admin_headers, - json={ - "name": "Area 2", - "type": "AREA", - }, - ) + res = client.post(f"/v1/studies/{study_id}/areas", json={"name": "Area 2", "type": "AREA"}) assert res.status_code == 200, res.json() area2_id = res.json()["id"] assert area2_id == "area 2" # Create a link between the two areas - res = client.post( - f"/v1/studies/{study_id}/links", - headers=admin_headers, - json={ - "area1": area1_id, - "area2": area2_id, - }, - ) + res = client.post(f"/v1/studies/{study_id}/links", json={"area1": area1_id, "area2": area2_id}) assert 
res.status_code == 200, res.json() # Create a cluster in area1 res = client.post( f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal", - headers=admin_headers, - json={ - "name": "Cluster 1", - "group": "Nuclear", - }, + json={"name": "Cluster 1", "group": "Nuclear"}, ) assert res.status_code == 200, res.json() cluster_id = res.json()["id"] @@ -718,11 +591,7 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud # Creation of a bc without group bc_id_wo_group = "binding_constraint_1" args = {"enabled": True, "timeStep": "hourly", "operator": "less", "terms": [], "comments": "New API"} - res = client.post( - f"/v1/studies/{study_id}/bindingconstraints", - json={"name": bc_id_wo_group, **args}, - headers=admin_headers, - ) + res = client.post(f"/v1/studies/{study_id}/bindingconstraints", json={"name": bc_id_wo_group, **args}) assert res.status_code in {200, 201} assert res.json()["group"] == "default" @@ -731,7 +600,6 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud res = client.post( f"/v1/studies/{study_id}/bindingconstraints", json={"name": bc_id_w_group, "group": "specific_grp", **args}, - headers=admin_headers, ) assert res.status_code in {200, 201} assert res.json()["group"] == "specific_grp" @@ -742,12 +610,11 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud res = client.post( f"/v1/studies/{study_id}/bindingconstraints", json={"name": bc_id_w_matrix, "less_term_matrix": matrix_lt3.tolist(), **args}, - headers=admin_headers, ) assert res.status_code in {200, 201}, res.json() if study_type == "variant": - res = client.get(f"/v1/studies/{study_id}/commands", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/commands") last_cmd_args = res.json()[-1]["args"] less_term_matrix = last_cmd_args["less_term_matrix"] equal_term_matrix = last_cmd_args["equal_term_matrix"] @@ -756,10 +623,10 @@ def test_for_version_870(self, client: TestClient, 
admin_access_token: str, stud # Check that raw matrices are created for term in ["lt", "gt", "eq"]: + path = f"input/bindingconstraints/{bc_id_w_matrix}_{term}" res = client.get( f"/v1/studies/{study_id}/raw", - params={"path": f"input/bindingconstraints/{bc_id_w_matrix}_{term}", "depth": 1, "formatted": True}, # type: ignore - headers=admin_headers, + params={"path": path, "depth": 1, "formatted": True}, # type: ignore ) assert res.status_code == 200, res.json() data = res.json()["data"] @@ -779,7 +646,6 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud {"weight": 1, "offset": 2, "data": {"area1": area1_id, "area2": area2_id}}, {"weight": 1, "offset": 2, "data": {"area": area1_id, "cluster": cluster_id}}, ], - headers=admin_headers, ) assert res.status_code == 200, res.json() @@ -787,7 +653,6 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud res = client.post( f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/terms", json=[{"weight": 1, "offset": 2}], - headers=admin_headers, ) assert res.status_code == 422, res.json() exception = res.json()["exception"] @@ -800,7 +665,6 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud res = client.post( f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/terms", json=[{"weight": 99, "offset": 0, "data": {"area1": area1_id, "area2": area2_id}}], - headers=admin_headers, ) assert res.status_code == 409, res.json() exception = res.json()["exception"] @@ -810,10 +674,7 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud assert f"{area1_id}%{area2_id}" in description, "Error message should contain the duplicate term ID" # Get binding constraints list to check added terms - res = client.get( - f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}", - headers=admin_headers, - ) + res = client.get(f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}") assert res.status_code 
== 200, res.json() binding_constraint = res.json() constraint_terms = binding_constraint["terms"] @@ -837,22 +698,22 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud res = client.put( f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/terms", json=[ - {"id": f"{area1_id}%{area2_id}", "weight": 4.4, "offset": 1}, + { + "id": f"{area1_id}%{area2_id}", + "weight": 4.4, + "offset": 1, + }, { "id": f"{area1_id}.{cluster_id}", "weight": 5.1, "data": {"area": area1_id, "cluster": cluster_id}, }, ], - headers=admin_headers, ) assert res.status_code == 200, res.json() # Asserts terms were updated - res = client.get( - f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}", - headers=admin_headers, - ) + res = client.get(f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}") assert res.status_code == 200, res.json() binding_constraint = res.json() constraint_terms = binding_constraint["terms"] @@ -881,7 +742,6 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud res = client.put( f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}", json={"group": grp_name}, - headers=admin_headers, ) assert res.status_code == 200, res.json() assert res.json()["group"] == grp_name @@ -890,14 +750,12 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud res = client.put( f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}", json={"greater_term_matrix": matrix_lt3.tolist()}, - headers=admin_headers, ) assert res.status_code == 200, res.json() res = client.get( f"/v1/studies/{study_id}/raw", params={"path": f"input/bindingconstraints/{bc_id_w_matrix}_gt"}, - headers=admin_headers, ) assert res.status_code == 200, res.json() assert res.json()["data"] == matrix_lt3.tolist() @@ -907,13 +765,12 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud res = client.put( f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}", 
json={"timeStep": "daily"}, - headers=admin_headers, ) assert res.status_code == 200, res.json() if study_type == "variant": # Check the last command is a change on `time_step` field only - res = client.get(f"/v1/studies/{study_id}/commands", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/commands") commands = res.json() command_args = commands[-1]["args"] assert command_args["time_step"] == "daily" @@ -935,7 +792,6 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud "depth": 1, "formatted": True, }, # type: ignore - headers=admin_headers, ) assert res.status_code == 200, res.json() assert res.json()["data"] == expected_matrix.tolist() @@ -945,11 +801,11 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud # ============================= # Delete a binding constraint - res = client.delete(f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}", headers=admin_headers) + res = client.delete(f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}") assert res.status_code == 200, res.json() # Asserts that the deletion worked - res = client.get(f"/v1/studies/{study_id}/bindingconstraints", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/bindingconstraints") assert len(res.json()) == 2 # ============================= @@ -968,7 +824,6 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud "comments": "New API", "values": [[]], }, - headers=admin_headers, ) assert res.status_code == 422, res.json() assert res.json()["description"] == "You cannot fill 'values' as it refers to the matrix before v8.7" @@ -977,7 +832,6 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud res = client.put( f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}", json={"values": [[]]}, - headers=admin_headers, ) assert res.status_code == 422, res.json() assert res.json()["exception"] == "InvalidFieldForVersionError" @@ 
-995,7 +849,6 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud "greater_term_matrix": matrix_gt2.tolist(), **args, }, - headers=admin_headers, ) assert res.status_code == 422, res.json() exception = res.json()["exception"] @@ -1020,7 +873,6 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud "group": "Group 1", **args, }, - headers=admin_headers, ) assert res.status_code in {200, 201}, res.json() first_bc_id = res.json()["id"] @@ -1029,17 +881,13 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud random_matrix = pd.DataFrame(generator.integers(0, 10, size=(4, 1))) _upload_matrix( client, - admin_access_token, study_id, f"input/bindingconstraints/{first_bc_id}_gt", random_matrix, ) # Validation should fail - res = client.get( - f"/v1/studies/{study_id}/constraint-groups/Group 1/validate", - headers=admin_headers, - ) + res = client.get(f"/v1/studies/{study_id}/constraint-groups/Group 1/validate") assert res.status_code == 422 obj = res.json() assert obj["exception"] == "WrongMatrixHeightError" @@ -1049,7 +897,6 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud res = client.put( f"/v1/studies/{study_id}/bindingconstraints/{first_bc_id}", json={"greater_term_matrix": matrix_lt3.tolist()}, - headers=admin_headers, ) assert res.status_code in {200, 201}, res.json() @@ -1059,10 +906,7 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud # # Asserts everything is ok. 
- res = client.get( - f"/v1/studies/{study_id}/constraint-groups/Group 1/validate", - headers=admin_headers, - ) + res = client.get(f"/v1/studies/{study_id}/constraint-groups/Group 1/validate") assert res.status_code == 200, res.json() matrix_gt4 = np.ones((8784, 4)) # Wrong number of columns @@ -1074,13 +918,12 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud "group": "group 1", # Same group, but different case **args, }, - headers=admin_headers, ) assert res.status_code in {200, 201}, res.json() second_bc_id = res.json()["id"] # validate the BC group "Group 1" - res = client.get(f"/v1/studies/{study_id}/constraint-groups/Group 1/validate", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/constraint-groups/Group 1/validate") assert res.status_code == 422, res.json() assert res.json()["exception"] == "MatrixWidthMismatchError" description = res.json()["description"] @@ -1091,7 +934,6 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud res = client.put( f"/v1/studies/{study_id}/bindingconstraints/{second_bc_id}", json={"greater_term_matrix": matrix_lt3.tolist()}, - headers=admin_headers, ) assert res.status_code in {200, 201}, res.json() @@ -1111,7 +953,6 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud "group": "Group 2", **args, }, - headers=admin_headers, ) assert res.status_code in {200, 201}, res.json() third_bd_id = res.json()["id"] @@ -1119,13 +960,12 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud res = client.put( f"v1/studies/{study_id}/bindingconstraints/{third_bd_id}", json={"group": "Group 1"}, - headers=admin_headers, ) # This should succeed but cause the validation endpoint to fail. 
assert res.status_code in {200, 201}, res.json() # validate the BC group "Group 1" - res = client.get(f"/v1/studies/{study_id}/constraint-groups/Group 1/validate", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/constraint-groups/Group 1/validate") assert res.status_code == 422, res.json() assert res.json()["exception"] == "MatrixWidthMismatchError" description = res.json()["description"] @@ -1136,7 +976,6 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud res = client.put( f"/v1/studies/{study_id}/bindingconstraints/{third_bd_id}", json={"greater_term_matrix": matrix_lt3.tolist()}, - headers=admin_headers, ) assert res.status_code in {200, 201}, res.json() @@ -1149,24 +988,22 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud res = client.put( f"v1/studies/{study_id}/bindingconstraints/{second_bc_id}", json={"group": "Group 2"}, - headers=admin_headers, ) assert res.status_code in {200, 201}, res.json() # validate the "Group 2": for the moment the BC is valid - res = client.get(f"/v1/studies/{study_id}/constraint-groups/Group 2/validate", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/constraint-groups/Group 2/validate") assert res.status_code in {200, 201}, res.json() res = client.put( f"v1/studies/{study_id}/bindingconstraints/{second_bc_id}", json={"greater_term_matrix": matrix_gt4.tolist()}, - headers=admin_headers, ) # This should succeed but cause the validation endpoint to fail. 
assert res.status_code in {200, 201}, res.json() # Collect all the binding constraints groups - res = client.get(f"/v1/studies/{study_id}/constraint-groups", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/constraint-groups") assert res.status_code in {200, 201}, res.json() groups = res.json() assert set(groups) == {"default", "random_grp", "Group 1", "Group 2"} @@ -1186,7 +1023,7 @@ def test_for_version_870(self, client: TestClient, admin_access_token: str, stud ] # Validate all binding constraints groups - res = client.get(f"/v1/studies/{study_id}/constraint-groups/validate-all", headers=admin_headers) + res = client.get(f"/v1/studies/{study_id}/constraint-groups/validate-all") assert res.status_code == 422, res.json() exception = res.json()["exception"] description = res.json()["description"] diff --git a/tests/integration/study_data_blueprint/test_config_general.py b/tests/integration/study_data_blueprint/test_config_general.py index e64c50aa83..3084a86b4e 100644 --- a/tests/integration/study_data_blueprint/test_config_general.py +++ b/tests/integration/study_data_blueprint/test_config_general.py @@ -17,11 +17,11 @@ def test_get_general_form_values( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ): """Check `set_general_form_values` end point""" res = client.get( - f"/v1/studies/{study_id}/config/general/form", + f"/v1/studies/{internal_study_id}/config/general/form", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == HTTPStatus.OK, res.json() @@ -49,12 +49,12 @@ def test_set_general_form_values( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ): """Check `set_general_form_values` end point""" obj = {"horizon": 2020} res = client.put( - f"/v1/studies/{study_id}/config/general/form", + f"/v1/studies/{internal_study_id}/config/general/form", headers={"Authorization": f"Bearer {user_access_token}"}, json=obj, ) diff --git 
a/tests/integration/study_data_blueprint/test_edit_matrix.py b/tests/integration/study_data_blueprint/test_edit_matrix.py index cb9e4702e9..28953d713b 100644 --- a/tests/integration/study_data_blueprint/test_edit_matrix.py +++ b/tests/integration/study_data_blueprint/test_edit_matrix.py @@ -241,7 +241,7 @@ def test_edit_matrix__thermal_cluster( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ): # Given the following Area area_id = "fr" @@ -249,7 +249,7 @@ def test_edit_matrix__thermal_cluster( # Create a cluster cluster_id = "cluster 1" res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[ { @@ -273,7 +273,7 @@ def test_edit_matrix__thermal_cluster( } ] res = client.put( - f"/v1/studies/{study_id}/matrix?path=input/thermal/series/{area_id}/{cluster_id}/series", + f"/v1/studies/{internal_study_id}/matrix?path=input/thermal/series/{area_id}/{cluster_id}/series", headers={"Authorization": f"Bearer {user_access_token}"}, json=obj, ) @@ -281,7 +281,7 @@ def test_edit_matrix__thermal_cluster( # We can check the modified matrix res = client.get( - f"/v1/studies/{study_id}/raw?path=input/thermal/series/{area_id}/{cluster_id}/series", + f"/v1/studies/{internal_study_id}/raw?path=input/thermal/series/{area_id}/{cluster_id}/series", headers={"Authorization": f"Bearer {user_access_token}"}, ) res.raise_for_status() diff --git a/tests/integration/study_data_blueprint/test_hydro_allocation.py b/tests/integration/study_data_blueprint/test_hydro_allocation.py index d4b04dd332..47eb65f39d 100644 --- a/tests/integration/study_data_blueprint/test_hydro_allocation.py +++ b/tests/integration/study_data_blueprint/test_hydro_allocation.py @@ -20,12 +20,12 @@ def test_get_allocation_form_values( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ) -> None: """Check 
`get_allocation_form_values` end point""" area_id = "de" res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/hydro/allocation/form", + f"/v1/studies/{internal_study_id}/areas/{area_id}/hydro/allocation/form", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == http.HTTPStatus.OK, res.json() @@ -37,7 +37,7 @@ def test_get_allocation_form_values__variant( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ) -> None: """ The purpose of this test is to check that we can get the form parameters from a study variant. @@ -46,7 +46,7 @@ def test_get_allocation_form_values__variant( """ # Create a managed study from the RAW study. res = client.post( - f"/v1/studies/{study_id}/copy", + f"/v1/studies/{internal_study_id}/copy", headers={"Authorization": f"Bearer {user_access_token}"}, params={"dest": "Clone", "with_outputs": False, "use_task": False}, ) @@ -109,13 +109,13 @@ def test_get_allocation_matrix( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, area_id: str, expected: t.List[t.List[float]], ) -> None: """Check `get_allocation_matrix` end point""" res = client.get( - f"/v1/studies/{study_id}/areas/hydro/allocation/matrix", + f"/v1/studies/{internal_study_id}/areas/hydro/allocation/matrix", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == http.HTTPStatus.OK, res.json() @@ -126,7 +126,7 @@ def test_set_allocation_form_values( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ) -> None: """Check `set_allocation_form_values` end point""" area_id = "de" @@ -137,7 +137,7 @@ def test_set_allocation_form_values( ] } res = client.put( - f"/v1/studies/{study_id}/areas/{area_id}/hydro/allocation/form", + f"/v1/studies/{internal_study_id}/areas/{area_id}/hydro/allocation/form", headers={"Authorization": f"Bearer {user_access_token}"}, json=expected, ) @@ -147,7 +147,7 @@ def 
test_set_allocation_form_values( # check that the values are updated res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", headers={"Authorization": f"Bearer {user_access_token}"}, params={"path": "input/hydro/allocation", "depth": 3}, ) @@ -161,7 +161,7 @@ def test_set_allocation_form_values( } assert actual == expected - def test_create_area(self, client: TestClient, user_access_token: str, study_id: str) -> None: + def test_create_area(self, client: TestClient, user_access_token: str, internal_study_id: str) -> None: """ Given a study, when an area is created, the hydraulic allocation column for this area must be updated with the following values: @@ -171,14 +171,14 @@ def test_create_area(self, client: TestClient, user_access_token: str, study_id: """ area_info = AreaInfoDTO(id="north", name="NORTH", type=AreaType.AREA) res = client.post( - f"/v1/studies/{study_id}/areas", + f"/v1/studies/{internal_study_id}/areas", headers={"Authorization": f"Bearer {user_access_token}"}, data=area_info.json(), ) assert res.status_code == http.HTTPStatus.OK, res.json() res = client.get( - f"/v1/studies/{study_id}/areas/hydro/allocation/matrix", + f"/v1/studies/{internal_study_id}/areas/hydro/allocation/matrix", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == http.HTTPStatus.OK @@ -196,7 +196,7 @@ def test_create_area(self, client: TestClient, user_access_token: str, study_id: } assert actual == expected - def test_delete_area(self, client: TestClient, user_access_token: str, study_id: str) -> None: + def test_delete_area(self, client: TestClient, user_access_token: str, internal_study_id: str) -> None: """ Given a study, when an area is deleted, the hydraulic allocation column for this area must be removed. 
@@ -211,7 +211,7 @@ def test_delete_area(self, client: TestClient, user_access_token: str, study_id: } for prod_area, allocation_cfg in obj.items(): res = client.post( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", headers={"Authorization": f"Bearer {user_access_token}"}, params={"path": f"input/hydro/allocation/{prod_area}"}, json=allocation_cfg, @@ -221,7 +221,7 @@ def test_delete_area(self, client: TestClient, user_access_token: str, study_id: # Then we remove the "fr" zone. # The deletion should update the allocation matrix of all other zones. res = client.delete( - f"/v1/studies/{study_id}/areas/fr", + f"/v1/studies/{internal_study_id}/areas/fr", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == http.HTTPStatus.OK, res.json() @@ -229,7 +229,7 @@ def test_delete_area(self, client: TestClient, user_access_token: str, study_id: # Check that the "fr" column is removed from the hydraulic allocation matrix. # The row corresponding to "fr" must also be deleted. 
res = client.get( - f"/v1/studies/{study_id}/areas/hydro/allocation/matrix", + f"/v1/studies/{internal_study_id}/areas/hydro/allocation/matrix", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == http.HTTPStatus.OK, res.json() diff --git a/tests/integration/study_data_blueprint/test_hydro_correlation.py b/tests/integration/study_data_blueprint/test_hydro_correlation.py index 3aea6b60ae..c986ead8de 100644 --- a/tests/integration/study_data_blueprint/test_hydro_correlation.py +++ b/tests/integration/study_data_blueprint/test_hydro_correlation.py @@ -20,12 +20,12 @@ def test_get_correlation_form_values( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ): """Check `get_correlation_form_values` end point""" area_id = "fr" res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/hydro/correlation/form", + f"/v1/studies/{internal_study_id}/areas/{area_id}/hydro/correlation/form", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == HTTPStatus.OK, res.json() @@ -44,7 +44,7 @@ def test_set_correlation_form_values( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ): """Check `set_correlation_form_values` end point""" area_id = "fr" @@ -57,7 +57,7 @@ def test_set_correlation_form_values( ] } res = client.put( - f"/v1/studies/{study_id}/areas/{area_id}/hydro/correlation/form", + f"/v1/studies/{internal_study_id}/areas/{area_id}/hydro/correlation/form", headers={"Authorization": f"Bearer {user_access_token}"}, json=obj, ) @@ -74,7 +74,7 @@ def test_set_correlation_form_values( # check that the form is updated correctly res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/hydro/correlation/form", + f"/v1/studies/{internal_study_id}/areas/{area_id}/hydro/correlation/form", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == HTTPStatus.OK, res.json() @@ -90,7 +90,7 @@ def 
test_set_correlation_form_values( # check that the matrix is symmetric res = client.get( - f"/v1/studies/{study_id}/areas/hydro/correlation/matrix", + f"/v1/studies/{internal_study_id}/areas/hydro/correlation/matrix", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == HTTPStatus.OK, res.json() @@ -158,14 +158,14 @@ def test_get_correlation_matrix( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, columns: str, expected: List[List[float]], ): """Check `get_correlation_matrix` end point""" query = f"columns={columns}" if columns else "" res = client.get( - f"/v1/studies/{study_id}/areas/hydro/correlation/matrix?{query}", + f"/v1/studies/{internal_study_id}/areas/hydro/correlation/matrix?{query}", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == HTTPStatus.OK, res.json() @@ -176,7 +176,7 @@ def test_set_correlation_matrix( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ): """Check `set_correlation_matrix` end point""" obj = { @@ -190,7 +190,7 @@ def test_set_correlation_matrix( "index": ["de", "es", "fr", "it"], } res = client.put( - f"/v1/studies/{study_id}/areas/hydro/correlation/matrix", + f"/v1/studies/{internal_study_id}/areas/hydro/correlation/matrix", headers={"Authorization": f"Bearer {user_access_token}"}, json=obj, ) @@ -200,7 +200,7 @@ def test_set_correlation_matrix( assert actual == expected res = client.get( - f"/v1/studies/{study_id}/areas/hydro/correlation/matrix", + f"/v1/studies/{internal_study_id}/areas/hydro/correlation/matrix", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == HTTPStatus.OK, res.json() @@ -217,7 +217,7 @@ def test_set_correlation_matrix( } assert actual == expected - def test_create_area(self, client: TestClient, user_access_token: str, study_id: str): + def test_create_area(self, client: TestClient, user_access_token: str, 
internal_study_id: str): """ Given a study, when an area is created, the hydraulic correlation column for this area must be updated with the following values: @@ -227,14 +227,14 @@ def test_create_area(self, client: TestClient, user_access_token: str, study_id: """ area_info = AreaInfoDTO(id="north", name="NORTH", type="AREA") res = client.post( - f"/v1/studies/{study_id}/areas", + f"/v1/studies/{internal_study_id}/areas", headers={"Authorization": f"Bearer {user_access_token}"}, data=area_info.json(), ) assert res.status_code == HTTPStatus.OK, res.json() res = client.get( - f"/v1/studies/{study_id}/areas/hydro/correlation/matrix", + f"/v1/studies/{internal_study_id}/areas/hydro/correlation/matrix", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == HTTPStatus.OK @@ -252,7 +252,7 @@ def test_create_area(self, client: TestClient, user_access_token: str, study_id: } assert actual == expected - def test_delete_area(self, client: TestClient, user_access_token: str, study_id: str): + def test_delete_area(self, client: TestClient, user_access_token: str, internal_study_id: str): """ Given a study, when an area is deleted, the hydraulic correlation column for this area must be removed. @@ -270,7 +270,7 @@ def test_delete_area(self, client: TestClient, user_access_token: str, study_id: } } res = client.post( - f"/v1/studies/{study_id}/raw?path=input/hydro/prepro/correlation", + f"/v1/studies/{internal_study_id}/raw?path=input/hydro/prepro/correlation", headers={"Authorization": f"Bearer {user_access_token}"}, json=correlation_cfg, ) @@ -279,7 +279,7 @@ def test_delete_area(self, client: TestClient, user_access_token: str, study_id: # Then we remove the "fr" zone. # The deletion should update the correlation matrix of all other zones. 
res = client.delete( - f"/v1/studies/{study_id}/areas/fr", + f"/v1/studies/{internal_study_id}/areas/fr", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == HTTPStatus.OK, res.json() @@ -287,7 +287,7 @@ def test_delete_area(self, client: TestClient, user_access_token: str, study_id: # Check that the "fr" column is removed from the hydraulic correlation matrix. # The row corresponding to "fr" must also be deleted. res = client.get( - f"/v1/studies/{study_id}/areas/hydro/correlation/matrix", + f"/v1/studies/{internal_study_id}/areas/hydro/correlation/matrix", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == HTTPStatus.OK, res.json() diff --git a/tests/integration/study_data_blueprint/test_hydro_inflow_structure.py b/tests/integration/study_data_blueprint/test_hydro_inflow_structure.py index 0a8bb82ed6..f673882752 100644 --- a/tests/integration/study_data_blueprint/test_hydro_inflow_structure.py +++ b/tests/integration/study_data_blueprint/test_hydro_inflow_structure.py @@ -18,7 +18,7 @@ def test_get_inflow_structure( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ): user_header = {"Authorization": f"Bearer {user_access_token}"} area_id = "fr" @@ -29,7 +29,7 @@ def test_get_inflow_structure( # Check that the default values are returned res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/hydro/inflow-structure", + f"/v1/studies/{internal_study_id}/areas/{area_id}/hydro/inflow-structure", headers=user_header, ) assert res.status_code == HTTPStatus.OK, res.json() @@ -40,7 +40,7 @@ def test_get_inflow_structure( # Update the values obj = {"interMonthlyCorrelation": 0.8} res = client.put( - f"/v1/studies/{study_id}/areas/{area_id}/hydro/inflow-structure", + f"/v1/studies/{internal_study_id}/areas/{area_id}/hydro/inflow-structure", headers=user_header, json=obj, ) @@ -48,7 +48,7 @@ def test_get_inflow_structure( # Check that the right values are 
returned res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/hydro/inflow-structure", + f"/v1/studies/{internal_study_id}/areas/{area_id}/hydro/inflow-structure", headers=user_header, ) assert res.status_code == HTTPStatus.OK, res.json() @@ -62,7 +62,7 @@ def test_get_inflow_structure( # Create a managed study from the RAW study. res = client.post( - f"/v1/studies/{study_id}/copy", + f"/v1/studies/{internal_study_id}/copy", headers={"Authorization": f"Bearer {user_access_token}"}, params={"dest": "Clone", "with_outputs": False, "use_task": False}, ) @@ -132,7 +132,7 @@ def test_update_inflow_structure__invalid_values( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ): user_header = {"Authorization": f"Bearer {user_access_token}"} area_id = "fr" @@ -140,7 +140,7 @@ def test_update_inflow_structure__invalid_values( # Update the values with invalid values obj = {"interMonthlyCorrelation": 1.1} res = client.put( - f"/v1/studies/{study_id}/areas/{area_id}/hydro/inflow-structure", + f"/v1/studies/{internal_study_id}/areas/{area_id}/hydro/inflow-structure", headers=user_header, json=obj, ) @@ -148,7 +148,7 @@ def test_update_inflow_structure__invalid_values( obj = {"interMonthlyCorrelation": -0.1} res = client.put( - f"/v1/studies/{study_id}/areas/{area_id}/hydro/inflow-structure", + f"/v1/studies/{internal_study_id}/areas/{area_id}/hydro/inflow-structure", headers=user_header, json=obj, ) diff --git a/tests/integration/study_data_blueprint/test_renewable.py b/tests/integration/study_data_blueprint/test_renewable.py index 8a9d575d97..b6c450e8f3 100644 --- a/tests/integration/study_data_blueprint/test_renewable.py +++ b/tests/integration/study_data_blueprint/test_renewable.py @@ -51,11 +51,11 @@ def test_lifecycle( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ) -> None: # Upgrade study to version 810 res = client.put( - f"/v1/studies/{study_id}/upgrade", + 
f"/v1/studies/{internal_study_id}/upgrade", headers={"Authorization": f"Bearer {user_access_token}"}, params={"target_version": 810}, ) @@ -75,7 +75,7 @@ def test_lifecycle( "data": {"renewable-generation-modelling": "clusters"}, } res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[{"action": "update_config", "args": args}], ) @@ -95,7 +95,7 @@ def test_lifecycle( attempts = [{}, {"name": ""}, {"name": "!??"}] for attempt in attempts: res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable", headers={"Authorization": f"Bearer {user_access_token}"}, json=attempt, ) @@ -112,7 +112,7 @@ def test_lifecycle( "tsInterpretation": "production-factor", } res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable", headers={"Authorization": f"Bearer {user_access_token}"}, json=fr_solar_pv_props, ) @@ -125,7 +125,7 @@ def test_lifecycle( # reading the properties of a renewable cluster res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable/{fr_solar_pv_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable/{fr_solar_pv_id}", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -139,14 +139,14 @@ def test_lifecycle( matrix_path = f"input/renewables/series/{area_id}/{fr_solar_pv_id.lower()}/series" args = {"target": matrix_path, "matrix": matrix} res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", json=[{"action": "replace_matrix", "args": args}], headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code in {200, 201}, res.json() res = client.get( - f"/v1/studies/{study_id}/raw", + 
f"/v1/studies/{internal_study_id}/raw", params={"path": matrix_path}, headers={"Authorization": f"Bearer {user_access_token}"}, ) @@ -159,7 +159,7 @@ def test_lifecycle( # Reading the list of renewable clusters res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -167,7 +167,7 @@ def test_lifecycle( # updating properties res = client.patch( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable/{fr_solar_pv_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable/{fr_solar_pv_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={ "name": "FR Solar pv old 1", @@ -183,7 +183,7 @@ def test_lifecycle( assert res.json() == fr_solar_pv_cfg res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable/{fr_solar_pv_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable/{fr_solar_pv_id}", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -195,7 +195,7 @@ def test_lifecycle( # updating properties res = client.patch( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable/{fr_solar_pv_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable/{fr_solar_pv_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={ "nominalCapacity": 2260, @@ -215,7 +215,7 @@ def test_lifecycle( # The `unitCount` property must be an integer greater than 0. 
bad_properties = {"unitCount": 0} res = client.patch( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable/{fr_solar_pv_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable/{fr_solar_pv_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json=bad_properties, ) @@ -224,7 +224,7 @@ def test_lifecycle( # The renewable cluster properties should not have been updated. res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable/{fr_solar_pv_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable/{fr_solar_pv_id}", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -236,7 +236,7 @@ def test_lifecycle( new_name = "Duplicate of SolarPV" res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/renewables/{fr_solar_pv_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/renewables/{fr_solar_pv_id}", headers={"Authorization": f"Bearer {user_access_token}"}, params={"newName": new_name}, ) @@ -251,7 +251,7 @@ def test_lifecycle( # asserts the matrix has also been duplicated new_cluster_matrix_path = f"input/renewables/series/{area_id}/{duplicated_id.lower()}/series" res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": new_cluster_matrix_path}, headers={"Authorization": f"Bearer {user_access_token}"}, ) @@ -265,7 +265,7 @@ def test_lifecycle( # To delete a renewable cluster, we need to provide its ID. res = client.request( "DELETE", - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable", headers={"Authorization": f"Bearer {user_access_token}"}, json=[fr_solar_pv_id], ) @@ -275,7 +275,7 @@ def test_lifecycle( # If the renewable cluster list is empty, the deletion should be a no-op. 
res = client.request( "DELETE", - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable", headers={"Authorization": f"Bearer {user_access_token}"}, json=[], ) @@ -286,7 +286,7 @@ def test_lifecycle( # Create two clusters other_cluster_name = "Other Cluster 1" res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable", headers={"Authorization": f"Bearer {user_access_token}"}, json={"name": other_cluster_name}, ) @@ -294,7 +294,7 @@ def test_lifecycle( other_cluster_id1 = res.json()["id"] res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable", headers={"Authorization": f"Bearer {user_access_token}"}, json={"name": "Other Cluster 2"}, ) @@ -304,7 +304,7 @@ def test_lifecycle( # We can delete two renewable clusters at once. 
res = client.request( "DELETE", - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable", headers={"Authorization": f"Bearer {user_access_token}"}, json=[other_cluster_id2, duplicated_id], ) @@ -313,7 +313,7 @@ def test_lifecycle( # There should only be one remaining cluster res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200 @@ -328,7 +328,7 @@ def test_lifecycle( bad_area_id = "bad_area" res = client.request( "DELETE", - f"/v1/studies/{study_id}/areas/{bad_area_id}/clusters/renewable", + f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/clusters/renewable", headers={"Authorization": f"Bearer {user_access_token}"}, json=[fr_solar_pv_id], ) @@ -357,7 +357,7 @@ def test_lifecycle( # Check GET with wrong `area_id` res = client.get( - f"/v1/studies/{study_id}/areas/{bad_area_id}/clusters/renewable/{fr_solar_pv_id}", + f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/clusters/renewable/{fr_solar_pv_id}", headers={"Authorization": f"Bearer {user_access_token}"}, ) obj = res.json() @@ -388,7 +388,7 @@ def test_lifecycle( # Check POST with wrong `area_id` res = client.post( - f"/v1/studies/{study_id}/areas/{bad_area_id}/clusters/renewable", + f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/clusters/renewable", headers={"Authorization": f"Bearer {user_access_token}"}, json={ "name": fr_solar_pv, @@ -407,7 +407,7 @@ def test_lifecycle( # Check POST with wrong `group` res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable", headers={"Authorization": f"Bearer {user_access_token}"}, json={"name": fr_solar_pv, "group": "GroupFoo"}, ) @@ -418,7 +418,7 @@ def test_lifecycle( # Check PATCH with the 
wrong `area_id` res = client.patch( - f"/v1/studies/{study_id}/areas/{bad_area_id}/clusters/renewable/{fr_solar_pv_id}", + f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/clusters/renewable/{fr_solar_pv_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={ "group": "Wind Onshore", @@ -436,7 +436,7 @@ def test_lifecycle( # Check PATCH with the wrong `cluster_id` bad_cluster_id = "bad_cluster" res = client.patch( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable/{bad_cluster_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable/{bad_cluster_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={ "group": "Wind Onshore", @@ -470,7 +470,7 @@ def test_lifecycle( # Cannot duplicate a fake cluster unknown_id = "unknown" res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/renewables/{unknown_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/renewables/{unknown_id}", headers={"Authorization": f"Bearer {user_access_token}"}, params={"newName": "duplicata"}, ) @@ -481,7 +481,7 @@ def test_lifecycle( # Cannot duplicate with an existing id res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/renewables/{other_cluster_id1}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/renewables/{other_cluster_id1}", headers={"Authorization": f"Bearer {user_access_token}"}, params={"newName": other_cluster_name.upper()}, # different case, but same ID ) diff --git a/tests/integration/study_data_blueprint/test_st_storage.py b/tests/integration/study_data_blueprint/test_st_storage.py index b8aa0de878..68fe46b138 100644 --- a/tests/integration/study_data_blueprint/test_st_storage.py +++ b/tests/integration/study_data_blueprint/test_st_storage.py @@ -49,7 +49,7 @@ def test_lifecycle__nominal( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, study_type: str, study_version: int, default_output: t.Dict[str, t.Any], @@ -85,7 +85,7 @@ def 
test_lifecycle__nominal( # Upgrade study to version 860 or above res = client.put( - f"/v1/studies/{study_id}/upgrade", + f"/v1/studies/{internal_study_id}/upgrade", headers=user_headers, params={"target_version": study_version}, ) @@ -96,22 +96,22 @@ def test_lifecycle__nominal( # Copies the study, to convert it into a managed one. res = client.post( - f"/v1/studies/{study_id}/copy", + f"/v1/studies/{internal_study_id}/copy", headers={"Authorization": f"Bearer {user_access_token}"}, params={"dest": "default", "with_outputs": False, "use_task": False}, # type: ignore ) assert res.status_code == 201, res.json() - study_id = res.json() + internal_study_id = res.json() if study_type == "variant": # Create Variant res = client.post( - f"/v1/studies/{study_id}/variants", + f"/v1/studies/{internal_study_id}/variants", headers=user_headers, params={"name": "Variant 1"}, ) assert res.status_code in {200, 201}, res.json() - study_id = res.json() + internal_study_id = res.json() # ============================= # SHORT-TERM STORAGE CREATION @@ -126,7 +126,7 @@ def test_lifecycle__nominal( attempts = [{}, {"name": ""}, {"name": "!??"}] for attempt in attempts: res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages", headers=user_headers, json=attempt, ) @@ -143,7 +143,7 @@ def test_lifecycle__nominal( "reservoirCapacity": 1500, } res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages", headers=user_headers, json=siemens_properties, ) @@ -155,7 +155,7 @@ def test_lifecycle__nominal( # reading the properties of a short-term storage res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages/{siemens_battery_id}", headers=user_headers, ) assert res.status_code == 200, res.json() @@ -169,7 +169,7 @@ def test_lifecycle__nominal( array 
= np.random.randint(0, 1000, size=(8760, 1)) array_list = array.tolist() res = client.put( - f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}/series/inflows", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages/{siemens_battery_id}/series/inflows", headers=user_headers, json={ "index": list(range(array.shape[0])), @@ -182,7 +182,7 @@ def test_lifecycle__nominal( # reading the matrix of a short-term storage res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}/series/inflows", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages/{siemens_battery_id}/series/inflows", headers=user_headers, ) assert res.status_code == 200, res.json() @@ -192,7 +192,7 @@ def test_lifecycle__nominal( # validating the matrices of a short-term storage res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}/validate", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages/{siemens_battery_id}/validate", headers=user_headers, ) assert res.status_code == 200, res.json() @@ -204,7 +204,7 @@ def test_lifecycle__nominal( # Reading the list of short-term storages res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages", headers=user_headers, ) assert res.status_code == 200, res.json() @@ -212,7 +212,7 @@ def test_lifecycle__nominal( # updating properties res = client.patch( - f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages/{siemens_battery_id}", headers=user_headers, json={ "name": "New Siemens Battery", @@ -228,7 +228,7 @@ def test_lifecycle__nominal( assert res.json() == siemens_output res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages/{siemens_battery_id}", headers=user_headers, ) assert res.status_code == 200, 
res.json() @@ -240,7 +240,7 @@ def test_lifecycle__nominal( # updating properties res = client.patch( - f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages/{siemens_battery_id}", headers=user_headers, json={ "initialLevel": 0.59, @@ -260,7 +260,7 @@ def test_lifecycle__nominal( # The `efficiency` property must be a float between 0 and 1. bad_properties = {"efficiency": 2.0} res = client.patch( - f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages/{siemens_battery_id}", headers=user_headers, json=bad_properties, ) @@ -269,7 +269,7 @@ def test_lifecycle__nominal( # The short-term storage properties should not have been updated. res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages/{siemens_battery_id}", headers=user_headers, ) assert res.status_code == 200, res.json() @@ -281,7 +281,7 @@ def test_lifecycle__nominal( new_name = "Duplicate of Siemens" res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages/{siemens_battery_id}", headers={"Authorization": f"Bearer {user_access_token}"}, params={"newName": new_name}, ) @@ -295,7 +295,7 @@ def test_lifecycle__nominal( # asserts the matrix has also been duplicated res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/storages/{duplicated_id}/series/inflows", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages/{duplicated_id}/series/inflows", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200 @@ -308,7 +308,7 @@ def test_lifecycle__nominal( # To delete a short-term storage, we need to provide its ID. 
res = client.request( "DELETE", - f"/v1/studies/{study_id}/areas/{area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages", headers=user_headers, json=[siemens_battery_id], ) @@ -318,7 +318,7 @@ def test_lifecycle__nominal( # If the short-term storage list is empty, the deletion should be a no-op. res = client.request( "DELETE", - f"/v1/studies/{study_id}/areas/{area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages", headers=user_headers, json=[], ) @@ -338,7 +338,7 @@ def test_lifecycle__nominal( "initialLevelOptim": False, } res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages", headers=user_headers, json=siemens_properties, ) @@ -357,7 +357,7 @@ def test_lifecycle__nominal( "initialLevel": 1, } res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages", headers=user_headers, json=grand_maison_properties, ) @@ -367,7 +367,7 @@ def test_lifecycle__nominal( # We can check that we have 2 short-term storages in the list. # Reading the list of short-term storages res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages", headers=user_headers, ) assert res.status_code == 200, res.json() @@ -378,7 +378,7 @@ def test_lifecycle__nominal( # We can delete the three short-term storages at once. res = client.request( "DELETE", - f"/v1/studies/{study_id}/areas/{area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages", headers=user_headers, json=[grand_maison_id, duplicated_output["id"]], ) @@ -387,7 +387,7 @@ def test_lifecycle__nominal( # Only one st-storage should remain. 
res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages", headers=user_headers, ) assert res.status_code == 200, res.json() @@ -401,7 +401,7 @@ def test_lifecycle__nominal( bad_area_id = "bad_area" res = client.request( "DELETE", - f"/v1/studies/{study_id}/areas/{bad_area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/storages", headers=user_headers, json=[siemens_battery_id], ) @@ -426,7 +426,7 @@ def test_lifecycle__nominal( # Check get with wrong `area_id` res = client.get( - f"/v1/studies/{study_id}/areas/{bad_area_id}/storages/{siemens_battery_id}", + f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/storages/{siemens_battery_id}", headers=user_headers, ) obj = res.json() @@ -457,7 +457,7 @@ def test_lifecycle__nominal( # Check POST with wrong `area_id` res = client.post( - f"/v1/studies/{study_id}/areas/{bad_area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/storages", headers=user_headers, json={"name": siemens_battery, "group": "Battery"}, ) @@ -468,7 +468,7 @@ def test_lifecycle__nominal( # Check POST with wrong `group` res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages", headers=user_headers, json={"name": siemens_battery, "group": "GroupFoo"}, ) @@ -479,7 +479,7 @@ def test_lifecycle__nominal( # Check PATCH with the wrong `area_id` res = client.patch( - f"/v1/studies/{study_id}/areas/{bad_area_id}/storages/{siemens_battery_id}", + f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/storages/{siemens_battery_id}", headers=user_headers, json={"efficiency": 1.0}, ) @@ -491,7 +491,7 @@ def test_lifecycle__nominal( # Check PATCH with the wrong `storage_id` bad_storage_id = "bad_storage" res = client.patch( - f"/v1/studies/{study_id}/areas/{area_id}/storages/{bad_storage_id}", + 
f"/v1/studies/{internal_study_id}/areas/{area_id}/storages/{bad_storage_id}", headers=user_headers, json={"efficiency": 1.0}, ) @@ -516,7 +516,7 @@ def test_lifecycle__nominal( # Cannot duplicate a unknown st-storage unknown_id = "unknown" res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/storages/{unknown_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages/{unknown_id}", headers={"Authorization": f"Bearer {user_access_token}"}, params={"newName": "duplicata"}, ) @@ -527,7 +527,7 @@ def test_lifecycle__nominal( # Cannot duplicate with an existing id res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages/{siemens_battery_id}", headers={"Authorization": f"Bearer {user_access_token}"}, params={"newName": siemens_battery.upper()}, # different case, but same ID ) @@ -540,7 +540,7 @@ def test_lifecycle__nominal( # Cannot specify the field 'enabled' before v8.8 properties = {"enabled": False, "name": "fake_name", "group": "Battery"} res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages", headers=user_headers, json=properties, ) diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py index 3ffbeb2b46..2e36b55490 100644 --- a/tests/integration/study_data_blueprint/test_table_mode.py +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -23,7 +23,7 @@ class TestTableMode: @pytest.mark.parametrize("study_version", [0, 810, 830, 860, 870, 880]) def test_lifecycle__nominal( - self, client: TestClient, user_access_token: str, study_id: str, study_version: int + self, client: TestClient, user_access_token: str, internal_study_id: str, study_version: int ) -> None: client.headers = {"Authorization": f"Bearer {user_access_token}"} @@ -35,7 +35,7 @@ def test_lifecycle__nominal( # Upgrade the 
study to the desired version if study_version: res = client.put( - f"/v1/studies/{study_id}/upgrade", + f"/v1/studies/{internal_study_id}/upgrade", params={"target_version": study_version}, ) assert res.status_code == 200, res.json() @@ -45,7 +45,7 @@ def test_lifecycle__nominal( assert task.status == TaskStatus.COMPLETED, task # Create another link to test specific bug. - res = client.post(f"/v1/studies/{study_id}/links", json={"area1": "de", "area2": "it"}) + res = client.post(f"/v1/studies/{internal_study_id}/links", json={"area1": "de", "area2": "it"}) assert res.status_code in [200, 201], res.json() # Table Mode - Area @@ -83,7 +83,7 @@ def test_lifecycle__nominal( _es_values["adequacyPatchMode"] = "inside" res = client.put( - f"/v1/studies/{study_id}/table-mode/areas", + f"/v1/studies/{internal_study_id}/table-mode/areas", json={ "de": _de_values, "es": _es_values, @@ -147,7 +147,7 @@ def test_lifecycle__nominal( actual = res.json() assert actual == expected_areas - res = client.get(f"/v1/studies/{study_id}/table-mode/areas") + res = client.get(f"/v1/studies/{internal_study_id}/table-mode/areas") assert res.status_code == 200, res.json() actual = res.json() assert actual == expected_areas @@ -158,7 +158,7 @@ def test_lifecycle__nominal( "averageUnsuppliedEnergyCost": 456, } res = client.put( - f"/v1/studies/{study_id}/table-mode/areas", + f"/v1/studies/{internal_study_id}/table-mode/areas", json={"de": _de_values}, ) assert res.status_code == 200, res.json() @@ -191,7 +191,7 @@ def test_lifecycle__nominal( } res = client.put( - f"/v1/studies/{study_id}/table-mode/links", + f"/v1/studies/{internal_study_id}/table-mode/links", json={ "de / fr": { "colorRgb": "#FFA500", @@ -287,7 +287,7 @@ def test_lifecycle__nominal( del expected_result["de / it"] assert actual == expected_result - res = client.get(f"/v1/studies/{study_id}/table-mode/links") + res = client.get(f"/v1/studies/{internal_study_id}/table-mode/links") assert res.status_code == 200, res.json() actual 
= res.json() # asserts the `de / it` link is not removed. @@ -354,7 +354,7 @@ def test_lifecycle__nominal( _solar_values.update({"costGeneration": "useCostTimeseries", "efficiency": 87, "variableOMCost": -12.5}) res = client.put( - f"/v1/studies/{study_id}/table-mode/thermals", + f"/v1/studies/{internal_study_id}/table-mode/thermals", json={ "de / 01_solar": _solar_values, "de / 02_wind_on": _wind_on_values, @@ -437,7 +437,7 @@ def test_lifecycle__nominal( assert res.json()["de / 02_wind_on"] == expected_thermals["de / 02_wind_on"] res = client.get( - f"/v1/studies/{study_id}/table-mode/thermals", + f"/v1/studies/{internal_study_id}/table-mode/thermals", params={"columns": ",".join(["group", "unitCount", "nominalCapacity", "so2"])}, ) assert res.status_code == 200, res.json() @@ -500,7 +500,7 @@ def test_lifecycle__nominal( "data": {"renewable-generation-modelling": "clusters"}, } res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", json=[{"action": "update_config", "args": args}], ) assert res.status_code == 200, res.json() @@ -559,7 +559,7 @@ def test_lifecycle__nominal( for area_id, generators in generators_by_country.items(): for generator_id, generator in generators.items(): res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/renewable", json=generator, ) res.raise_for_status() @@ -584,7 +584,7 @@ def test_lifecycle__nominal( # Update some generators using the table mode res = client.put( - f"/v1/studies/{study_id}/table-mode/renewables", + f"/v1/studies/{internal_study_id}/table-mode/renewables", json={ "fr / Dieppe": {"enabled": False}, "fr / La Rochelle": {"enabled": True, "nominalCapacity": 3.1, "unitCount": 2}, @@ -594,7 +594,7 @@ def test_lifecycle__nominal( assert res.status_code == 200, res.json() res = client.get( - f"/v1/studies/{study_id}/table-mode/renewables", + 
f"/v1/studies/{internal_study_id}/table-mode/renewables", params={"columns": ",".join(["group", "enabled", "unitCount", "nominalCapacity"])}, ) assert res.status_code == 200, res.json() @@ -679,7 +679,7 @@ def test_lifecycle__nominal( for area_id, storages in storage_by_country.items(): for storage_id, storage in storages.items(): res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/storages", + f"/v1/studies/{internal_study_id}/areas/{area_id}/storages", json=storage, ) res.raise_for_status() @@ -692,7 +692,7 @@ def test_lifecycle__nominal( _it_storage3_values["enabled"] = False res = client.put( - f"/v1/studies/{study_id}/table-mode/st-storages", + f"/v1/studies/{internal_study_id}/table-mode/st-storages", json={ "fr / siemens": _fr_siemes_values, "fr / tesla": _fr_tesla_values, @@ -760,7 +760,7 @@ def test_lifecycle__nominal( assert actual == expected res = client.get( - f"/v1/studies/{study_id}/table-mode/st-storages", + f"/v1/studies/{internal_study_id}/table-mode/st-storages", params={ "columns": ",".join( [ @@ -810,7 +810,7 @@ def test_lifecycle__nominal( # Create a cluster in fr fr_id = "fr" res = client.post( - f"/v1/studies/{study_id}/areas/{fr_id}/clusters/thermal", + f"/v1/studies/{internal_study_id}/areas/{fr_id}/clusters/thermal", json={ "name": "Cluster 1", "group": "Nuclear", @@ -822,7 +822,7 @@ def test_lifecycle__nominal( # Create Binding Constraints res = client.post( - f"/v1/studies/{study_id}/bindingconstraints", + f"/v1/studies/{internal_study_id}/bindingconstraints", json={ "name": "Binding Constraint 1", "enabled": True, @@ -833,7 +833,7 @@ def test_lifecycle__nominal( assert res.status_code == 200, res.json() res = client.post( - f"/v1/studies/{study_id}/bindingconstraints", + f"/v1/studies/{internal_study_id}/bindingconstraints", json={ "name": "Binding Constraint 2", "enabled": False, @@ -874,7 +874,7 @@ def test_lifecycle__nominal( _bc2_values["group"] = "My BC Group" res = client.put( - 
f"/v1/studies/{study_id}/table-mode/binding-constraints", + f"/v1/studies/{internal_study_id}/table-mode/binding-constraints", json={ "binding constraint 1": _bc1_values, "binding constraint 2": _bc2_values, @@ -909,7 +909,7 @@ def test_lifecycle__nominal( assert actual == expected_binding res = client.get( - f"/v1/studies/{study_id}/table-mode/binding-constraints", + f"/v1/studies/{internal_study_id}/table-mode/binding-constraints", params={"columns": ""}, ) assert res.status_code == 200, res.json() diff --git a/tests/integration/study_data_blueprint/test_thermal.py b/tests/integration/study_data_blueprint/test_thermal.py index e3f62eca1e..33e9aef03d 100644 --- a/tests/integration/study_data_blueprint/test_thermal.py +++ b/tests/integration/study_data_blueprint/test_thermal.py @@ -287,7 +287,7 @@ class TestThermal: "version", [pytest.param(0, id="No Upgrade"), pytest.param(860, id="v8.6"), pytest.param(870, id="v8.7")] ) def test_lifecycle( - self, client: TestClient, user_access_token: str, study_id: str, admin_access_token: str, version: int + self, client: TestClient, user_access_token: str, internal_study_id: str, admin_access_token: str, version: int ) -> None: # ============================= # STUDY UPGRADE @@ -295,7 +295,7 @@ def test_lifecycle( if version != 0: res = client.put( - f"/v1/studies/{study_id}/upgrade", + f"/v1/studies/{internal_study_id}/upgrade", headers={"Authorization": f"Bearer {admin_access_token}"}, params={"target_version": version}, ) @@ -337,7 +337,7 @@ def test_lifecycle( attempts = [{}, {"name": ""}, {"name": "!??"}] for attempt in attempts: res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", headers={"Authorization": f"Bearer {user_access_token}"}, json=attempt, ) @@ -360,7 +360,7 @@ def test_lifecycle( "marketBidCost": 181.267, } res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal", + 
f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", headers={"Authorization": f"Bearer {user_access_token}"}, json=fr_gas_conventional_props, ) @@ -385,7 +385,7 @@ def test_lifecycle( # reading the properties of a thermal cluster res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -399,14 +399,14 @@ def test_lifecycle( matrix_path = f"input/thermal/prepro/{area_id}/{fr_gas_conventional_id.lower()}/data" args = {"target": matrix_path, "matrix": matrix} res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", json=[{"action": "replace_matrix", "args": args}], headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code in {200, 201}, res.json() res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": matrix_path}, headers={"Authorization": f"Bearer {user_access_token}"}, ) @@ -419,7 +419,7 @@ def test_lifecycle( # Reading the list of thermal clusters res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -427,7 +427,7 @@ def test_lifecycle( # updating properties res = client.patch( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={ "name": "FR_Gas conventional old 1", @@ -443,7 +443,7 @@ def test_lifecycle( assert res.json() == fr_gas_conventional_cfg res = client.get( - 
f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -455,7 +455,7 @@ def test_lifecycle( # updating properties res = client.patch( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={ "marginalCost": 182.456, @@ -477,7 +477,7 @@ def test_lifecycle( # The `unitCount` property must be an integer greater than 0. bad_properties = {"unitCount": 0} res = client.patch( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json=bad_properties, ) @@ -486,7 +486,7 @@ def test_lifecycle( # The thermal cluster properties should not have been updated. res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -494,7 +494,7 @@ def test_lifecycle( # Update with a pollutant. Should succeed even with versions prior to v8.6 res = client.patch( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={"nox": 10.0}, ) @@ -503,7 +503,7 @@ def test_lifecycle( # Update with the field `efficiency`. 
Should succeed even with versions prior to v8.7 res = client.patch( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={"efficiency": 97.0}, ) @@ -516,7 +516,7 @@ def test_lifecycle( new_name = "Duplicate of Fr_Gas_Conventional" res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/thermals/{fr_gas_conventional_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/thermals/{fr_gas_conventional_id}", headers={"Authorization": f"Bearer {user_access_token}"}, params={"newName": new_name}, ) @@ -536,7 +536,7 @@ def test_lifecycle( # asserts the matrix has also been duplicated new_cluster_matrix_path = f"input/thermal/prepro/{area_id}/{duplicated_id.lower()}/data" res = client.get( - f"/v1/studies/{study_id}/raw", + f"/v1/studies/{internal_study_id}/raw", params={"path": new_cluster_matrix_path}, headers={"Authorization": f"Bearer {user_access_token}"}, ) @@ -549,7 +549,7 @@ def test_lifecycle( # Everything is fine at the beginning res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200 @@ -559,14 +559,14 @@ def test_lifecycle( _upload_matrix( client, user_access_token, - study_id, + internal_study_id, f"input/thermal/series/{area_id}/{fr_gas_conventional_id.lower()}/series", pd.DataFrame(np.random.randint(0, 10, size=(4, 1))), ) # Validation should fail res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate", headers={"Authorization": f"Bearer 
{user_access_token}"}, ) assert res.status_code == 422 @@ -578,14 +578,14 @@ def test_lifecycle( _upload_matrix( client, user_access_token, - study_id, + internal_study_id, f"input/thermal/series/{area_id}/{fr_gas_conventional_id.lower()}/series", pd.DataFrame(np.random.randint(0, 10, size=(8760, 4))), ) # Validation should succeed again res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200 @@ -596,14 +596,14 @@ def test_lifecycle( _upload_matrix( client, user_access_token, - study_id, + internal_study_id, f"input/thermal/series/{area_id}/{fr_gas_conventional_id.lower()}/CO2Cost", pd.DataFrame(np.random.randint(0, 10, size=(8760, 3))), ) # Validation should fail res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 422 @@ -640,26 +640,44 @@ def test_lifecycle( # noinspection SpellCheckingInspection res = client.post( - f"/v1/studies/{study_id}/bindingconstraints", + f"/v1/studies/{internal_study_id}/bindingconstraints", json=bc_obj, headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code in {200, 201}, res.json() - # To delete a thermal cluster, we need to provide its ID. 
+ # verify that we can't delete the thermal cluster because it is referenced in a binding constraint res = client.request( "DELETE", - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", + headers={"Authorization": f"Bearer {user_access_token}"}, + json=[fr_gas_conventional_id], + ) + assert res.status_code == 403, res.json() + description = res.json()["description"] + assert all([elm in description for elm in [fr_gas_conventional, "binding constraint"]]) + assert res.json()["exception"] == "ReferencedObjectDeletionNotAllowed" + + # delete the binding constraint + res = client.delete( + f"/v1/studies/{internal_study_id}/bindingconstraints/{bc_obj['name']}", + headers={"Authorization": f"Bearer {user_access_token}"}, + ) + assert res.status_code == 200, res.json() + + # Now we can delete the thermal cluster + res = client.request( + "DELETE", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", headers={"Authorization": f"Bearer {user_access_token}"}, json=[fr_gas_conventional_id], ) assert res.status_code == 204, res.json() - assert res.text in {"", "null"} # Old FastAPI versions return 'null'. - # When we delete a thermal cluster, we should also delete the binding constraints that reference it. + # check that the binding constraint has been deleted # noinspection SpellCheckingInspection res = client.get( - f"/v1/studies/{study_id}/bindingconstraints", + f"/v1/studies/{internal_study_id}/bindingconstraints", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -668,7 +686,7 @@ def test_lifecycle( # If the thermal cluster list is empty, the deletion should be a no-op. 
res = client.request( "DELETE", - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", headers={"Authorization": f"Bearer {user_access_token}"}, json=[], ) @@ -681,7 +699,7 @@ def test_lifecycle( other_cluster_id2 = "02_wind_on" res = client.request( "DELETE", - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", headers={"Authorization": f"Bearer {user_access_token}"}, json=[other_cluster_id1, other_cluster_id2], ) @@ -690,7 +708,7 @@ def test_lifecycle( # The list of thermal clusters should not contain the deleted ones. res = client.get( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200, res.json() @@ -706,7 +724,7 @@ def test_lifecycle( bad_area_id = "bad_area" res = client.request( "DELETE", - f"/v1/studies/{study_id}/areas/{bad_area_id}/clusters/thermal", + f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/clusters/thermal", headers={"Authorization": f"Bearer {user_access_token}"}, json=[fr_gas_conventional_id], ) @@ -735,7 +753,7 @@ def test_lifecycle( # Check GET with wrong `area_id` res = client.get( - f"/v1/studies/{study_id}/areas/{bad_area_id}/clusters/thermal/{fr_gas_conventional_id}", + f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/clusters/thermal/{fr_gas_conventional_id}", headers={"Authorization": f"Bearer {user_access_token}"}, ) obj = res.json() @@ -766,7 +784,7 @@ def test_lifecycle( # Check POST with wrong `area_id` res = client.post( - f"/v1/studies/{study_id}/areas/{bad_area_id}/clusters/thermal", + f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/clusters/thermal", headers={"Authorization": f"Bearer {user_access_token}"}, json={ "name": fr_gas_conventional, @@ -789,7 +807,7 @@ def test_lifecycle( # 
Check POST with wrong `group` res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", headers={"Authorization": f"Bearer {user_access_token}"}, json={"name": fr_gas_conventional, "group": "GroupFoo"}, ) @@ -800,7 +818,7 @@ def test_lifecycle( # Check PATCH with the wrong `area_id` res = client.patch( - f"/v1/studies/{study_id}/areas/{bad_area_id}/clusters/thermal/{fr_gas_conventional_id}", + f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/clusters/thermal/{fr_gas_conventional_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={ "group": "Oil", @@ -822,7 +840,7 @@ def test_lifecycle( # Check PATCH with the wrong `cluster_id` bad_cluster_id = "bad_cluster" res = client.patch( - f"/v1/studies/{study_id}/areas/{area_id}/clusters/thermal/{bad_cluster_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{bad_cluster_id}", headers={"Authorization": f"Bearer {user_access_token}"}, json={ "group": "Oil", @@ -864,7 +882,7 @@ def test_lifecycle( # Cannot duplicate a fake cluster unknown_id = "unknown" res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/thermals/{unknown_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/thermals/{unknown_id}", headers={"Authorization": f"Bearer {user_access_token}"}, params={"newName": "duplicate"}, ) @@ -875,7 +893,7 @@ def test_lifecycle( # Cannot duplicate with an existing id res = client.post( - f"/v1/studies/{study_id}/areas/{area_id}/thermals/{duplicated_id}", + f"/v1/studies/{internal_study_id}/areas/{area_id}/thermals/{duplicated_id}", headers={"Authorization": f"Bearer {user_access_token}"}, params={"newName": new_name.upper()}, # different case but same ID ) @@ -1029,3 +1047,171 @@ def test_variant_lifecycle(self, client: TestClient, user_access_token: str, var "replace_matrix", "remove_cluster", ] + + def test_thermal_cluster_deletion(self, client: TestClient, 
user_access_token: str, internal_study_id: str) -> None: + """ + Test that creating a thermal cluster with invalid properties raises a validation error. + """ + + client.headers = {"Authorization": f"Bearer {user_access_token}"} + + # Create an area "area_1" in the study + res = client.post( + f"/v1/studies/{internal_study_id}/areas", + json={ + "name": "area_1", + "type": "AREA", + "metadata": {"country": "FR"}, + }, + ) + assert res.status_code == 200, res.json() + + # Create an area "area_2" in the study + res = client.post( + f"/v1/studies/{internal_study_id}/areas", + json={ + "name": "area_2", + "type": "AREA", + "metadata": {"country": "DE"}, + }, + ) + assert res.status_code == 200, res.json() + + # Create an area "area_3" in the study + res = client.post( + f"/v1/studies/{internal_study_id}/areas", + json={ + "name": "area_3", + "type": "AREA", + "metadata": {"country": "ES"}, + }, + ) + assert res.status_code == 200, res.json() + + # Create a thermal cluster in the study for area_1 + res = client.post( + f"/v1/studies/{internal_study_id}/areas/area_1/clusters/thermal", + json={ + "name": "cluster_1", + "group": "Nuclear", + "unitCount": 13, + "nominalCapacity": 42500, + "marginalCost": 0.1, + }, + ) + assert res.status_code == 200, res.json() + + # Create a thermal cluster in the study for area_2 + res = client.post( + f"/v1/studies/{internal_study_id}/areas/area_2/clusters/thermal", + json={ + "name": "cluster_2", + "group": "Nuclear", + "unitCount": 13, + "nominalCapacity": 42500, + "marginalCost": 0.1, + }, + ) + assert res.status_code == 200, res.json() + + # Create a thermal cluster in the study for area_3 + res = client.post( + f"/v1/studies/{internal_study_id}/areas/area_3/clusters/thermal", + json={ + "name": "cluster_3", + "group": "Nuclear", + "unitCount": 13, + "nominalCapacity": 42500, + "marginalCost": 0.1, + }, + ) + assert res.status_code == 200, res.json() + + # add a binding constraint that references the thermal cluster in area_1 + 
bc_obj = { + "name": "bc_1", + "enabled": True, + "time_step": "hourly", + "operator": "less", + "terms": [ + { + "id": "area_1.cluster_1", + "weight": 2, + "offset": 5, + "data": {"area": "area_1", "cluster": "cluster_1"}, + } + ], + } + res = client.post( + f"/v1/studies/{internal_study_id}/bindingconstraints", + json=bc_obj, + ) + assert res.status_code == 200, res.json() + + # add a binding constraint that references the thermal cluster in area_2 + bc_obj = { + "name": "bc_2", + "enabled": True, + "time_step": "hourly", + "operator": "less", + "terms": [ + { + "id": "area_2.cluster_2", + "weight": 2, + "offset": 5, + "data": {"area": "area_2", "cluster": "cluster_2"}, + } + ], + } + res = client.post( + f"/v1/studies/{internal_study_id}/bindingconstraints", + json=bc_obj, + ) + assert res.status_code == 200, res.json() + + # check that deleting the thermal cluster in area_1 fails + res = client.delete( + f"/v1/studies/{internal_study_id}/areas/area_1/clusters/thermal", + json=["cluster_1"], + ) + assert res.status_code == 403, res.json() + + # now delete the binding constraint that references the thermal cluster in area_1 + res = client.delete( + f"/v1/studies/{internal_study_id}/bindingconstraints/bc_1", + ) + assert res.status_code == 200, res.json() + + # check that deleting the thermal cluster in area_1 succeeds + res = client.delete( + f"/v1/studies/{internal_study_id}/areas/area_1/clusters/thermal", + json=["cluster_1"], + ) + assert res.status_code == 204, res.json() + + # check that deleting the thermal cluster in area_2 fails + res = client.delete( + f"/v1/studies/{internal_study_id}/areas/area_2/clusters/thermal", + json=["cluster_2"], + ) + assert res.status_code == 403, res.json() + + # now delete the binding constraint that references the thermal cluster in area_2 + res = client.delete( + f"/v1/studies/{internal_study_id}/bindingconstraints/bc_2", + ) + assert res.status_code == 200, res.json() + + # check that deleting the thermal cluster in 
area_2 succeeds + res = client.delete( + f"/v1/studies/{internal_study_id}/areas/area_2/clusters/thermal", + json=["cluster_2"], + ) + assert res.status_code == 204, res.json() + + # check that deleting the thermal cluster in area_3 succeeds + res = client.delete( + f"/v1/studies/{internal_study_id}/areas/area_3/clusters/thermal", + json=["cluster_3"], + ) + assert res.status_code == 204, res.json() diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index 55e182c7d3..f0eb8491f1 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -1427,8 +1427,20 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: }, } + # check that at this stage the area cannot be deleted as it is referenced in binding constraint 1 result = client.delete(f"/v1/studies/{study_id}/areas/area%201") - assert result.status_code == 200 + assert result.status_code == 403, res.json() + # verify the error message + description = result.json()["description"] + assert all([elm in description for elm in ["area 1", "binding constraint 1"]]) + # check the exception + assert result.json()["exception"] == "ReferencedObjectDeletionNotAllowed" + + # delete binding constraint 1 + client.delete(f"/v1/studies/{study_id}/bindingconstraints/binding%20constraint%201") + # check now that we can delete the area 1 + result = client.delete(f"/v1/studies/{study_id}/areas/area%201") + assert result.status_code == 200, res.json() res_areas = client.get(f"/v1/studies/{study_id}/areas") assert res_areas.json() == [ { @@ -1554,7 +1566,7 @@ def set_maintenance(value: bool) -> None: assert res.json() == message -def test_import(client: TestClient, admin_access_token: str, study_id: str) -> None: +def test_import(client: TestClient, admin_access_token: str, internal_study_id: str) -> None: client.headers = {"Authorization": f"Bearer {admin_access_token}"} zip_path = ASSETS_DIR / "STA-mini.zip" @@ -1630,12 +1642,12 @@ 
def test_import(client: TestClient, admin_access_token: str, study_id: str) -> N # tests outputs import for .zip output_path_zip = ASSETS_DIR / "output_adq.zip" client.post( - f"/v1/studies/{study_id}/output", + f"/v1/studies/{internal_study_id}/output", headers={"Authorization": f'Bearer {george_credentials["access_token"]}'}, files={"output": io.BytesIO(output_path_zip.read_bytes())}, ) res = client.get( - f"/v1/studies/{study_id}/outputs", + f"/v1/studies/{internal_study_id}/outputs", headers={"Authorization": f'Bearer {george_credentials["access_token"]}'}, ) assert len(res.json()) == 6 @@ -1643,12 +1655,12 @@ def test_import(client: TestClient, admin_access_token: str, study_id: str) -> N # tests outputs import for .7z output_path_seven_zip = ASSETS_DIR / "output_adq.7z" client.post( - f"/v1/studies/{study_id}/output", + f"/v1/studies/{internal_study_id}/output", headers={"Authorization": f'Bearer {george_credentials["access_token"]}'}, files={"output": io.BytesIO(output_path_seven_zip.read_bytes())}, ) res = client.get( - f"/v1/studies/{study_id}/outputs", + f"/v1/studies/{internal_study_id}/outputs", headers={"Authorization": f'Bearer {george_credentials["access_token"]}'}, ) assert len(res.json()) == 7 @@ -1676,11 +1688,11 @@ def test_import(client: TestClient, admin_access_token: str, study_id: str) -> N assert result[1]["name"] == "it.txt" -def test_copy(client: TestClient, admin_access_token: str, study_id: str) -> None: +def test_copy(client: TestClient, admin_access_token: str, internal_study_id: str) -> None: client.headers = {"Authorization": f"Bearer {admin_access_token}"} # Copy a study with admin user who belongs to a group - copied = client.post(f"/v1/studies/{study_id}/copy?dest=copied&use_task=false") + copied = client.post(f"/v1/studies/{internal_study_id}/copy?dest=copied&use_task=false") assert copied.status_code == 201 # asserts that it has admin groups and PublicMode to NONE res = client.get(f"/v1/studies/{copied.json()}").json() @@ 
-1693,7 +1705,7 @@ def test_copy(client: TestClient, admin_access_token: str, study_id: str) -> Non # George copies a study copied = client.post( - f"/v1/studies/{study_id}/copy?dest=copied&use_task=false", + f"/v1/studies/{internal_study_id}/copy?dest=copied&use_task=false", headers={"Authorization": f'Bearer {george_credentials["access_token"]}'}, ) assert copied.status_code == 201 @@ -1701,3 +1713,175 @@ def test_copy(client: TestClient, admin_access_token: str, study_id: str) -> Non res = client.get(f"/v1/studies/{copied.json()}").json() assert res["groups"] == [] assert res["public_mode"] == "READ" + + +def test_areas_deletion_with_binding_constraints( + client: TestClient, user_access_token: str, internal_study_id: str +) -> None: + """ + Test the deletion of areas that are referenced in binding constraints. + """ + + # set client headers to user access token + client.headers = {"Authorization": f"Bearer {user_access_token}"} + + area1_id = "france" + area2_id = "germany" + cluster_id = "nuclear power plant" + + constraint_terms = [ + { + # Link between two areas + "data": {"area1": area1_id, "area2": area2_id}, + "id": f"{area1_id}%{area2_id}", + "offset": 2, + "weight": 1.0, + }, + { + # Cluster in an area + "data": {"area": area1_id, "cluster": cluster_id.lower()}, + "id": f"{area1_id}.{cluster_id.lower()}", + "offset": 2, + "weight": 1.0, + }, + ] + + for constraint_term in constraint_terms: + # Create an area "area_1" in the study + res = client.post( + f"/v1/studies/{internal_study_id}/areas", + json={"name": area1_id.title(), "type": "AREA", "metadata": {"country": "FR"}}, + ) + res.raise_for_status() + + if set(constraint_term["data"]) == {"area1", "area2"}: + # Create a second area and a link between the two areas + res = client.post( + f"/v1/studies/{internal_study_id}/areas", + json={"name": area2_id.title(), "type": "AREA", "metadata": {"country": "DE"}}, + ) + res.raise_for_status() + res = client.post( + f"/v1/studies/{internal_study_id}/links", 
+ json={"area1": area1_id, "area2": area2_id}, + ) + res.raise_for_status() + + elif set(constraint_term["data"]) == {"area", "cluster"}: + # Create a cluster in the first area + res = client.post( + f"/v1/studies/{internal_study_id}/areas/{area1_id}/clusters/thermal", + json={"name": cluster_id.title(), "group": "Nuclear"}, + ) + res.raise_for_status() + + else: + raise NotImplementedError(f"Unsupported constraint term: {constraint_term}") + + # create a binding constraint that references the link + bc_id = "bc_1" + bc_obj = { + "name": bc_id, + "enabled": True, + "time_step": "daily", + "operator": "less", + "terms": [constraint_term], + } + res = client.post(f"/v1/studies/{internal_study_id}/bindingconstraints", json=bc_obj) + res.raise_for_status() + + if set(constraint_term["data"]) == {"area1", "area2"}: + areas_to_delete = [area1_id, area2_id] + elif set(constraint_term["data"]) == {"area", "cluster"}: + areas_to_delete = [area1_id] + else: + raise NotImplementedError(f"Unsupported constraint term: {constraint_term}") + + for area_id in areas_to_delete: + # try to delete the areas + res = client.delete(f"/v1/studies/{internal_study_id}/areas/{area_id}") + assert res.status_code == 403, res.json() + description = res.json()["description"] + assert all([elm in description for elm in [area_id, bc_id]]) + assert res.json()["exception"] == "ReferencedObjectDeletionNotAllowed" + + # delete the binding constraint + res = client.delete(f"/v1/studies/{internal_study_id}/bindingconstraints/{bc_id}") + assert res.status_code == 200, res.json() + + for area_id in areas_to_delete: + # delete the area + res = client.delete(f"/v1/studies/{internal_study_id}/areas/{area_id}") + assert res.status_code == 200, res.json() + + +def test_links_deletion_with_binding_constraints( + client: TestClient, user_access_token: str, internal_study_id: str +) -> None: + """ + Test the deletion of links that are referenced in binding constraints. 
+ """ + + # set client headers to user access token + client.headers = {"Authorization": f"Bearer {user_access_token}"} + + # Create an area "area_1" in the study + res = client.post( + f"/v1/studies/{internal_study_id}/areas", + json={ + "name": "area_1", + "type": "AREA", + "metadata": {"country": "FR"}, + }, + ) + assert res.status_code == 200, res.json() + + # Create an area "area_2" in the study + res = client.post( + f"/v1/studies/{internal_study_id}/areas", + json={ + "name": "area_2", + "type": "AREA", + "metadata": {"country": "DE"}, + }, + ) + assert res.status_code == 200, res.json() + + # create a link between the two areas + res = client.post( + f"/v1/studies/{internal_study_id}/links", + json={"area1": "area_1", "area2": "area_2"}, + ) + assert res.status_code == 200, res.json() + + # create a binding constraint that references the link + bc_obj = { + "name": "bc_1", + "enabled": True, + "time_step": "hourly", + "operator": "less", + "terms": [ + { + "id": "area_1%area_2", + "weight": 2, + "data": {"area1": "area_1", "area2": "area_2"}, + } + ], + } + res = client.post(f"/v1/studies/{internal_study_id}/bindingconstraints", json=bc_obj) + assert res.status_code == 200, res.json() + + # try to delete the link before deleting the binding constraint + res = client.delete(f"/v1/studies/{internal_study_id}/links/area_1/area_2") + assert res.status_code == 403, res.json() + description = res.json()["description"] + assert all([elm in description for elm in ["area_1%area_2", "bc_1"]]) + assert res.json()["exception"] == "ReferencedObjectDeletionNotAllowed" + + # delete the binding constraint + res = client.delete(f"/v1/studies/{internal_study_id}/bindingconstraints/bc_1") + assert res.status_code == 200, res.json() + + # delete the link + res = client.delete(f"/v1/studies/{internal_study_id}/links/area_1/area_2") + assert res.status_code == 200, res.json() diff --git a/tests/integration/test_studies_upgrade.py b/tests/integration/test_studies_upgrade.py index 
f734223344..3eb92522af 100644 --- a/tests/integration/test_studies_upgrade.py +++ b/tests/integration/test_studies_upgrade.py @@ -11,9 +11,9 @@ class TestStudyUpgrade: @pytest.mark.skipif(RUN_ON_WINDOWS, reason="This test runs randomly on Windows") - def test_upgrade_study__next_version(self, client: TestClient, user_access_token: str, study_id: str): + def test_upgrade_study__next_version(self, client: TestClient, user_access_token: str, internal_study_id: str): res = client.put( - f"/v1/studies/{study_id}/upgrade", + f"/v1/studies/{internal_study_id}/upgrade", headers={"Authorization": f"Bearer {user_access_token}"}, ) assert res.status_code == 200 @@ -24,10 +24,10 @@ def test_upgrade_study__next_version(self, client: TestClient, user_access_token assert "710" in task.result.message, f"Version not in {task.result.message=}" @pytest.mark.skipif(RUN_ON_WINDOWS, reason="This test runs randomly on Windows") - def test_upgrade_study__target_version(self, client: TestClient, user_access_token: str, study_id: str): + def test_upgrade_study__target_version(self, client: TestClient, user_access_token: str, internal_study_id: str): target_version = "720" res = client.put( - f"/v1/studies/{study_id}/upgrade", + f"/v1/studies/{internal_study_id}/upgrade", headers={"Authorization": f"Bearer {user_access_token}"}, params={"target_version": target_version}, ) @@ -39,10 +39,12 @@ def test_upgrade_study__target_version(self, client: TestClient, user_access_tok assert target_version in task.result.message, f"Version not in {task.result.message=}" @pytest.mark.skipif(RUN_ON_WINDOWS, reason="This test runs randomly on Windows") - def test_upgrade_study__bad_target_version(self, client: TestClient, user_access_token: str, study_id: str): + def test_upgrade_study__bad_target_version( + self, client: TestClient, user_access_token: str, internal_study_id: str + ): target_version = "999" res = client.put( - f"/v1/studies/{study_id}/upgrade", + f"/v1/studies/{internal_study_id}/upgrade", 
headers={"Authorization": f"Bearer {user_access_token}"}, params={"target_version": target_version}, ) diff --git a/tests/integration/variant_blueprint/test_renewable_cluster.py b/tests/integration/variant_blueprint/test_renewable_cluster.py index 3dab4a946b..e518244ff9 100644 --- a/tests/integration/variant_blueprint/test_renewable_cluster.py +++ b/tests/integration/variant_blueprint/test_renewable_cluster.py @@ -21,7 +21,7 @@ def test_lifecycle( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ) -> None: # sourcery skip: extract-duplicate-method @@ -32,7 +32,7 @@ def test_lifecycle( # We have an "old" study that we need to upgrade to version 810 min_study_version = 810 res = client.put( - f"/v1/studies/{study_id}/upgrade", + f"/v1/studies/{internal_study_id}/upgrade", headers={"Authorization": f"Bearer {user_access_token}"}, params={"target_version": min_study_version}, ) @@ -51,7 +51,7 @@ def test_lifecycle( "data": {"renewable-generation-modelling": "clusters"}, } res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[{"action": "update_config", "args": args}], ) @@ -76,7 +76,7 @@ def test_lifecycle( }, } res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[{"action": "create_renewables_cluster", "args": args}], ) @@ -97,7 +97,7 @@ def test_lifecycle( }, } res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[{"action": "create_renewables_cluster", "args": args}], ) @@ -105,7 +105,7 @@ def test_lifecycle( # Check the properties of the renewable clusters in the "FR" area res = client.get( - f"/v1/studies/{study_id}/areas/{area_fr_id}/clusters/renewable/{cluster_fr1_id}", + 
f"/v1/studies/{internal_study_id}/areas/{area_fr_id}/clusters/renewable/{cluster_fr1_id}", headers={"Authorization": f"Bearer {user_access_token}"}, ) res.raise_for_status() @@ -122,7 +122,7 @@ def test_lifecycle( assert properties == expected res = client.get( - f"/v1/studies/{study_id}/areas/{area_fr_id}/clusters/renewable/{cluster_fr2_id}", + f"/v1/studies/{internal_study_id}/areas/{area_fr_id}/clusters/renewable/{cluster_fr2_id}", headers={"Authorization": f"Bearer {user_access_token}"}, ) res.raise_for_status() @@ -156,7 +156,7 @@ def test_lifecycle( "matrix": values_fr2.tolist(), } res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[ {"action": "replace_matrix", "args": args_fr1}, @@ -167,7 +167,7 @@ def test_lifecycle( # Check the matrices of the renewable clusters in the "FR" area res = client.get( - f"/v1/studies/{study_id}/raw?path=input/renewables/series/{area_fr_id}/{series_fr1_id}/series", + f"/v1/studies/{internal_study_id}/raw?path=input/renewables/series/{area_fr_id}/{series_fr1_id}/series", headers={"Authorization": f"Bearer {user_access_token}"}, ) res.raise_for_status() @@ -175,7 +175,7 @@ def test_lifecycle( assert np.array(matrix_fr1["data"], dtype=np.float64).all() == values_fr1.all() res = client.get( - f"/v1/studies/{study_id}/raw?path=input/renewables/series/{area_fr_id}/{series_fr2_id}/series", + f"/v1/studies/{internal_study_id}/raw?path=input/renewables/series/{area_fr_id}/{series_fr2_id}/series", headers={"Authorization": f"Bearer {user_access_token}"}, ) res.raise_for_status() @@ -202,14 +202,14 @@ def test_lifecycle( }, } res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[{"action": "create_renewables_cluster", "args": args}], ) res.raise_for_status() res = client.get( - 
f"/v1/studies/{study_id}/areas/{area_it_id}/clusters/renewable/{cluster_it1_id}", + f"/v1/studies/{internal_study_id}/areas/{area_it_id}/clusters/renewable/{cluster_it1_id}", headers={"Authorization": f"Bearer {user_access_token}"}, ) res.raise_for_status() @@ -228,7 +228,7 @@ def test_lifecycle( # Check the matrices of the renewable clusters in the "IT" area series_it1_id = cluster_it1_id.lower() # Series IDs are in lower case res = client.get( - f"/v1/studies/{study_id}/raw?path=input/renewables/series/{area_it_id}/{series_it1_id}/series", + f"/v1/studies/{internal_study_id}/raw?path=input/renewables/series/{area_it_id}/{series_it1_id}/series", headers={"Authorization": f"Bearer {user_access_token}"}, ) res.raise_for_status() @@ -242,7 +242,7 @@ def test_lifecycle( # The `remove_renewables_cluster` command allows you to delete a Renewable Cluster. args = {"area_id": area_fr_id, "cluster_id": cluster_fr2_id} res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[{"action": "remove_renewables_cluster", "args": args}], ) @@ -250,7 +250,7 @@ def test_lifecycle( # Check the properties of all renewable clusters res = client.get( - f"/v1/studies/{study_id}/raw?path=input/renewables/clusters&depth=4", + f"/v1/studies/{internal_study_id}/raw?path=input/renewables/clusters&depth=4", headers={"Authorization": f"Bearer {user_access_token}"}, ) res.raise_for_status() @@ -284,7 +284,7 @@ def test_lifecycle( # The `remove_renewables_cluster` command allows you to delete a Renewable Cluster. 
args = {"area_id": area_fr_id, "cluster_id": cluster_fr1_id} res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[{"action": "remove_renewables_cluster", "args": args}], ) @@ -292,7 +292,7 @@ def test_lifecycle( # Check the properties of all renewable clusters res = client.get( - f"/v1/studies/{study_id}/raw?path=input/renewables/clusters&depth=4", + f"/v1/studies/{internal_study_id}/raw?path=input/renewables/clusters&depth=4", headers={"Authorization": f"Bearer {user_access_token}"}, ) res.raise_for_status() @@ -319,7 +319,7 @@ def test_lifecycle( # this behavior is not yet implemented, so you will encounter a 500 error. args = {"area_id": area_fr_id, "cluster_id": cluster_fr2_id} res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[{"action": "remove_renewables_cluster", "args": args}], ) diff --git a/tests/integration/variant_blueprint/test_st_storage.py b/tests/integration/variant_blueprint/test_st_storage.py index c0f531ddb4..c28af6790d 100644 --- a/tests/integration/variant_blueprint/test_st_storage.py +++ b/tests/integration/variant_blueprint/test_st_storage.py @@ -22,7 +22,7 @@ def test_lifecycle( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ): # ======================= # Study version upgrade @@ -31,7 +31,7 @@ def test_lifecycle( # We have an "old" study that we need to upgrade to version 860 min_study_version = 860 res = client.put( - f"/v1/studies/{study_id}/upgrade", + f"/v1/studies/{internal_study_id}/upgrade", headers={"Authorization": f"Bearer {user_access_token}"}, params={"target_version": min_study_version}, ) @@ -42,12 +42,12 @@ def test_lifecycle( # We can check that the study is upgraded to the required version res = client.get( - f"/v1/studies/{study_id}", + 
f"/v1/studies/{internal_study_id}", headers={"Authorization": f"Bearer {user_access_token}"}, ) res.raise_for_status() assert res.json() == { - "id": study_id, + "id": internal_study_id, "name": "STA-mini", "version": min_study_version, "created": ANY, # ISO8601 Date/time @@ -69,7 +69,7 @@ def test_lifecycle( # Here is the list of available areas res = client.get( - f"/v1/studies/{study_id}/areas", + f"/v1/studies/{internal_study_id}/areas", headers={"Authorization": f"Bearer {user_access_token}"}, ) res.raise_for_status() @@ -104,7 +104,7 @@ def test_lifecycle( }, } res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[{"action": "create_st_storage", "args": args}], ) @@ -130,7 +130,7 @@ def test_lifecycle( "matrix": pmax_injection.tolist(), } res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[ {"action": "replace_matrix", "args": args1}, @@ -166,7 +166,7 @@ def test_lifecycle( "inflows": inflows.tolist(), } res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[{"action": "create_st_storage", "args": args}], ) @@ -179,7 +179,7 @@ def test_lifecycle( # The `remove_st_storage` command allows you to delete a Short-Term Storage. 
args = {"area_id": area_id, "storage_id": siemens_battery_id} res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[{"action": "remove_st_storage", "args": args}], ) @@ -215,7 +215,7 @@ def test_lifecycle( "inflows": inflows.tolist(), } res = client.post( - f"/v1/studies/{study_id}/commands", + f"/v1/studies/{internal_study_id}/commands", headers={"Authorization": f"Bearer {user_access_token}"}, json=[{"action": "create_st_storage", "args": args}], ) diff --git a/tests/integration/variant_blueprint/test_thermal_cluster.py b/tests/integration/variant_blueprint/test_thermal_cluster.py index 567b974a80..245fd8a02a 100644 --- a/tests/integration/variant_blueprint/test_thermal_cluster.py +++ b/tests/integration/variant_blueprint/test_thermal_cluster.py @@ -43,7 +43,7 @@ def test_cascade_update( self, client: TestClient, user_access_token: str, - study_id: str, + internal_study_id: str, ) -> None: """ This test is based on the study "STA-mini.zip", which is a RAW study. @@ -54,7 +54,7 @@ def test_cascade_update( """ # First, we create a copy of the study, and we convert it to a managed study. res = client.post( - f"/v1/studies/{study_id}/copy", + f"/v1/studies/{internal_study_id}/copy", headers={"Authorization": f"Bearer {user_access_token}"}, params={"dest": "default", "with_outputs": False, "use_task": False}, ) diff --git a/tests/storage/integration/data/set_id_annual.py b/tests/storage/integration/data/set_id_annual.py new file mode 100644 index 0000000000..dde1a172e0 --- /dev/null +++ b/tests/storage/integration/data/set_id_annual.py @@ -0,0 +1,327 @@ +import math + +set_id_annual = { + "columns": [ + ("OP. COST", "Euro", "min"), + ("OP. COST", "Euro", "max"), + ("MRG. PRICE", "Euro", "min"), + ("MRG. 
PRICE", "Euro", "max"), + ("CO2 EMIS.", "Tons", "min"), + ("CO2 EMIS.", "Tons", "max"), + ("NH3 EMIS.", "Tons", "min"), + ("NH3 EMIS.", "Tons", "max"), + ("SO2 EMIS.", "Tons", "min"), + ("SO2 EMIS.", "Tons", "max"), + ("NOX EMIS.", "Tons", "min"), + ("NOX EMIS.", "Tons", "max"), + ("PM2_5 EMIS.", "Tons", "min"), + ("PM2_5 EMIS.", "Tons", "max"), + ("PM5 EMIS.", "Tons", "min"), + ("PM5 EMIS.", "Tons", "max"), + ("PM10 EMIS.", "Tons", "min"), + ("PM10 EMIS.", "Tons", "max"), + ("NMVOC EMIS.", "Tons", "min"), + ("NMVOC EMIS.", "Tons", "max"), + ("OP1 EMIS.", "Tons", "min"), + ("OP1 EMIS.", "Tons", "max"), + ("OP2 EMIS.", "Tons", "min"), + ("OP2 EMIS.", "Tons", "max"), + ("OP3 EMIS.", "Tons", "min"), + ("OP3 EMIS.", "Tons", "max"), + ("OP4 EMIS.", "Tons", "min"), + ("OP4 EMIS.", "Tons", "max"), + ("OP5 EMIS.", "Tons", "min"), + ("OP5 EMIS.", "Tons", "max"), + ("BALANCE", "MWh", "min"), + ("BALANCE", "MWh", "max"), + ("LOAD", "MWh", "min"), + ("LOAD", "MWh", "max"), + ("H. ROR", "MWh", "min"), + ("H. ROR", "MWh", "max"), + ("NUCLEAR", "MWh", "min"), + ("NUCLEAR", "MWh", "max"), + ("LIGNITE", "MWh", "min"), + ("LIGNITE", "MWh", "max"), + ("COAL", "MWh", "min"), + ("COAL", "MWh", "max"), + ("GAS", "MWh", "min"), + ("GAS", "MWh", "max"), + ("OIL", "MWh", "min"), + ("OIL", "MWh", "max"), + ("MIX. FUEL", "MWh", "min"), + ("MIX. FUEL", "MWh", "max"), + ("MISC. DTG", "MWh", "min"), + ("MISC. DTG", "MWh", "max"), + ("MISC. DTG 2", "MWh", "min"), + ("MISC. DTG 2", "MWh", "max"), + ("MISC. DTG 3", "MWh", "min"), + ("MISC. DTG 3", "MWh", "max"), + ("MISC. DTG 4", "MWh", "min"), + ("MISC. DTG 4", "MWh", "max"), + ("WIND OFFSHORE", "MWh", "min"), + ("WIND OFFSHORE", "MWh", "max"), + ("WIND ONSHORE", "MWh", "min"), + ("WIND ONSHORE", "MWh", "max"), + ("SOLAR CONCRT.", "MWh", "min"), + ("SOLAR CONCRT.", "MWh", "max"), + ("SOLAR PV", "MWh", "min"), + ("SOLAR PV", "MWh", "max"), + ("SOLAR ROOFT", "MWh", "min"), + ("SOLAR ROOFT", "MWh", "max"), + ("RENW. 1", "MWh", "min"), + ("RENW. 
1", "MWh", "max"), + ("RENW. 2", "MWh", "min"), + ("RENW. 2", "MWh", "max"), + ("RENW. 3", "MWh", "min"), + ("RENW. 3", "MWh", "max"), + ("RENW. 4", "MWh", "min"), + ("RENW. 4", "MWh", "max"), + ("H. STOR", "MWh", "min"), + ("H. STOR", "MWh", "max"), + ("H. PUMP", "MWh", "min"), + ("H. PUMP", "MWh", "max"), + ("H. LEV", "%", "min"), + ("H. LEV", "%", "max"), + ("H. INFL", "MWh", "min"), + ("H. INFL", "MWh", "max"), + ("H. OVFL", "%", "min"), + ("H. OVFL", "%", "max"), + ("H. VAL", "Euro/MWh", "min"), + ("H. VAL", "Euro/MWh", "max"), + ("H. COST", "Euro", "min"), + ("H. COST", "Euro", "max"), + ("PSP_open_injection", "MW", "min"), + ("PSP_open_injection", "MW", "max"), + ("PSP_open_withdrawal", "MW", "min"), + ("PSP_open_withdrawal", "MW", "max"), + ("PSP_open_level", "MWh", "min"), + ("PSP_open_level", "MWh", "max"), + ("PSP_closed_injection", "MW", "min"), + ("PSP_closed_injection", "MW", "max"), + ("PSP_closed_withdrawal", "MW", "min"), + ("PSP_closed_withdrawal", "MW", "max"), + ("PSP_closed_level", "MWh", "min"), + ("PSP_closed_level", "MWh", "max"), + ("Pondage_injection", "MW", "min"), + ("Pondage_injection", "MW", "max"), + ("Pondage_withdrawal", "MW", "min"), + ("Pondage_withdrawal", "MW", "max"), + ("Pondage_level", "MWh", "min"), + ("Pondage_level", "MWh", "max"), + ("Battery_injection", "MW", "min"), + ("Battery_injection", "MW", "max"), + ("Battery_withdrawal", "MW", "min"), + ("Battery_withdrawal", "MW", "max"), + ("Battery_level", "MWh", "min"), + ("Battery_level", "MWh", "max"), + ("Other1_injection", "MW", "min"), + ("Other1_injection", "MW", "max"), + ("Other1_withdrawal", "MW", "min"), + ("Other1_withdrawal", "MW", "max"), + ("Other1_level", "MWh", "min"), + ("Other1_level", "MWh", "max"), + ("Other2_injection", "MW", "min"), + ("Other2_injection", "MW", "max"), + ("Other2_withdrawal", "MW", "min"), + ("Other2_withdrawal", "MW", "max"), + ("Other2_level", "MWh", "min"), + ("Other2_level", "MWh", "max"), + ("Other3_injection", "MW", "min"), + 
("Other3_injection", "MW", "max"), + ("Other3_withdrawal", "MW", "min"), + ("Other3_withdrawal", "MW", "max"), + ("Other3_level", "MWh", "min"), + ("Other3_level", "MWh", "max"), + ("Other4_injection", "MW", "min"), + ("Other4_injection", "MW", "max"), + ("Other4_withdrawal", "MW", "min"), + ("Other4_withdrawal", "MW", "max"), + ("Other4_level", "MWh", "min"), + ("Other4_level", "MWh", "max"), + ("Other5_injection", "MW", "min"), + ("Other5_injection", "MW", "max"), + ("Other5_withdrawal", "MW", "min"), + ("Other5_withdrawal", "MW", "max"), + ("Other5_level", "MWh", "min"), + ("Other5_level", "MWh", "max"), + ("UNSP. ENRG", "MWh", "min"), + ("UNSP. ENRG", "MWh", "max"), + ("SPIL. ENRG", "MWh", "min"), + ("SPIL. ENRG", "MWh", "max"), + ("LOLD", "Hours", "min"), + ("LOLD", "Hours", "max"), + ("AVL DTG", "MWh", "min"), + ("AVL DTG", "MWh", "max"), + ("DTG MRG", "MWh", "min"), + ("DTG MRG", "MWh", "max"), + ("MAX MRG", "MWh", "min"), + ("MAX MRG", "MWh", "max"), + ("NP COST", "Euro", "min"), + ("NP COST", "Euro", "max"), + ("NODU", " ", "min"), + ("NODU", " ", "max"), + ], + "data": [ + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + math.nan, + math.nan, + 1.0, + 1.0, + math.nan, + math.nan, + math.nan, + math.nan, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, 
+ 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + ] + ], + "index": ["Annual"], +} diff --git a/tests/storage/integration/data/set_values_monthly.py b/tests/storage/integration/data/set_values_monthly.py new file mode 100644 index 0000000000..0cc1ad2d32 --- /dev/null +++ b/tests/storage/integration/data/set_values_monthly.py @@ -0,0 +1,4206 @@ +import math + +set_values_monthly = { + "columns": [ + ("OV. COST", "Euro", "EXP"), + ("OP. COST", "Euro", "EXP"), + ("OP. COST", "Euro", "std"), + ("OP. COST", "Euro", "min"), + ("OP. COST", "Euro", "max"), + ("MRG. PRICE", "Euro", "EXP"), + ("MRG. PRICE", "Euro", "std"), + ("MRG. PRICE", "Euro", "min"), + ("MRG. PRICE", "Euro", "max"), + ("CO2 EMIS.", "Tons", "EXP"), + ("CO2 EMIS.", "Tons", "std"), + ("CO2 EMIS.", "Tons", "min"), + ("CO2 EMIS.", "Tons", "max"), + ("NH3 EMIS.", "Tons", "EXP"), + ("NH3 EMIS.", "Tons", "std"), + ("NH3 EMIS.", "Tons", "min"), + ("NH3 EMIS.", "Tons", "max"), + ("SO2 EMIS.", "Tons", "EXP"), + ("SO2 EMIS.", "Tons", "std"), + ("SO2 EMIS.", "Tons", "min"), + ("SO2 EMIS.", "Tons", "max"), + ("NOX EMIS.", "Tons", "EXP"), + ("NOX EMIS.", "Tons", "std"), + ("NOX EMIS.", "Tons", "min"), + ("NOX EMIS.", "Tons", "max"), + ("PM2_5 EMIS.", "Tons", "EXP"), + ("PM2_5 EMIS.", "Tons", "std"), + ("PM2_5 EMIS.", "Tons", "min"), + ("PM2_5 EMIS.", "Tons", "max"), + ("PM5 EMIS.", "Tons", "EXP"), + ("PM5 EMIS.", "Tons", "std"), + ("PM5 EMIS.", "Tons", "min"), + ("PM5 EMIS.", "Tons", "max"), + ("PM10 EMIS.", "Tons", "EXP"), + ("PM10 EMIS.", "Tons", "std"), + ("PM10 EMIS.", "Tons", "min"), + ("PM10 EMIS.", "Tons", "max"), + ("NMVOC EMIS.", "Tons", "EXP"), + ("NMVOC EMIS.", "Tons", "std"), + ("NMVOC EMIS.", "Tons", "min"), + ("NMVOC EMIS.", "Tons", "max"), + ("OP1 EMIS.", "Tons", "EXP"), + ("OP1 EMIS.", "Tons", "std"), + ("OP1 
EMIS.", "Tons", "min"), + ("OP1 EMIS.", "Tons", "max"), + ("OP2 EMIS.", "Tons", "EXP"), + ("OP2 EMIS.", "Tons", "std"), + ("OP2 EMIS.", "Tons", "min"), + ("OP2 EMIS.", "Tons", "max"), + ("OP3 EMIS.", "Tons", "EXP"), + ("OP3 EMIS.", "Tons", "std"), + ("OP3 EMIS.", "Tons", "min"), + ("OP3 EMIS.", "Tons", "max"), + ("OP4 EMIS.", "Tons", "EXP"), + ("OP4 EMIS.", "Tons", "std"), + ("OP4 EMIS.", "Tons", "min"), + ("OP4 EMIS.", "Tons", "max"), + ("OP5 EMIS.", "Tons", "EXP"), + ("OP5 EMIS.", "Tons", "std"), + ("OP5 EMIS.", "Tons", "min"), + ("OP5 EMIS.", "Tons", "max"), + ("BALANCE", "MWh", "EXP"), + ("BALANCE", "MWh", "std"), + ("BALANCE", "MWh", "min"), + ("BALANCE", "MWh", "max"), + ("ROW BAL.", "MWh", "values"), + ("PSP", "MWh", "EXP"), + ("MISC. NDG", "MWh", "EXP"), + ("LOAD", "MWh", "EXP"), + ("LOAD", "MWh", "std"), + ("LOAD", "MWh", "min"), + ("LOAD", "MWh", "max"), + ("H. ROR", "MWh", "EXP"), + ("H. ROR", "MWh", "std"), + ("H. ROR", "MWh", "min"), + ("H. ROR", "MWh", "max"), + ("NUCLEAR", "MWh", "EXP"), + ("NUCLEAR", "MWh", "std"), + ("NUCLEAR", "MWh", "min"), + ("NUCLEAR", "MWh", "max"), + ("LIGNITE", "MWh", "EXP"), + ("LIGNITE", "MWh", "std"), + ("LIGNITE", "MWh", "min"), + ("LIGNITE", "MWh", "max"), + ("COAL", "MWh", "EXP"), + ("COAL", "MWh", "std"), + ("COAL", "MWh", "min"), + ("COAL", "MWh", "max"), + ("GAS", "MWh", "EXP"), + ("GAS", "MWh", "std"), + ("GAS", "MWh", "min"), + ("GAS", "MWh", "max"), + ("OIL", "MWh", "EXP"), + ("OIL", "MWh", "std"), + ("OIL", "MWh", "min"), + ("OIL", "MWh", "max"), + ("MIX. FUEL", "MWh", "EXP"), + ("MIX. FUEL", "MWh", "std"), + ("MIX. FUEL", "MWh", "min"), + ("MIX. FUEL", "MWh", "max"), + ("MISC. DTG", "MWh", "EXP"), + ("MISC. DTG", "MWh", "std"), + ("MISC. DTG", "MWh", "min"), + ("MISC. DTG", "MWh", "max"), + ("MISC. DTG 2", "MWh", "EXP"), + ("MISC. DTG 2", "MWh", "std"), + ("MISC. DTG 2", "MWh", "min"), + ("MISC. DTG 2", "MWh", "max"), + ("MISC. DTG 3", "MWh", "EXP"), + ("MISC. DTG 3", "MWh", "std"), + ("MISC. 
DTG 3", "MWh", "min"), + ("MISC. DTG 3", "MWh", "max"), + ("MISC. DTG 4", "MWh", "EXP"), + ("MISC. DTG 4", "MWh", "std"), + ("MISC. DTG 4", "MWh", "min"), + ("MISC. DTG 4", "MWh", "max"), + ("WIND OFFSHORE", "MWh", "EXP"), + ("WIND OFFSHORE", "MWh", "std"), + ("WIND OFFSHORE", "MWh", "min"), + ("WIND OFFSHORE", "MWh", "max"), + ("WIND ONSHORE", "MWh", "EXP"), + ("WIND ONSHORE", "MWh", "std"), + ("WIND ONSHORE", "MWh", "min"), + ("WIND ONSHORE", "MWh", "max"), + ("SOLAR CONCRT.", "MWh", "EXP"), + ("SOLAR CONCRT.", "MWh", "std"), + ("SOLAR CONCRT.", "MWh", "min"), + ("SOLAR CONCRT.", "MWh", "max"), + ("SOLAR PV", "MWh", "EXP"), + ("SOLAR PV", "MWh", "std"), + ("SOLAR PV", "MWh", "min"), + ("SOLAR PV", "MWh", "max"), + ("SOLAR ROOFT", "MWh", "EXP"), + ("SOLAR ROOFT", "MWh", "std"), + ("SOLAR ROOFT", "MWh", "min"), + ("SOLAR ROOFT", "MWh", "max"), + ("RENW. 1", "MWh", "EXP"), + ("RENW. 1", "MWh", "std"), + ("RENW. 1", "MWh", "min"), + ("RENW. 1", "MWh", "max"), + ("RENW. 2", "MWh", "EXP"), + ("RENW. 2", "MWh", "std"), + ("RENW. 2", "MWh", "min"), + ("RENW. 2", "MWh", "max"), + ("RENW. 3", "MWh", "EXP"), + ("RENW. 3", "MWh", "std"), + ("RENW. 3", "MWh", "min"), + ("RENW. 3", "MWh", "max"), + ("RENW. 4", "MWh", "EXP"), + ("RENW. 4", "MWh", "std"), + ("RENW. 4", "MWh", "min"), + ("RENW. 4", "MWh", "max"), + ("H. STOR", "MWh", "EXP"), + ("H. STOR", "MWh", "std"), + ("H. STOR", "MWh", "min"), + ("H. STOR", "MWh", "max"), + ("H. PUMP", "MWh", "EXP"), + ("H. PUMP", "MWh", "std"), + ("H. PUMP", "MWh", "min"), + ("H. PUMP", "MWh", "max"), + ("H. LEV", "%", "EXP"), + ("H. LEV", "%", "std"), + ("H. LEV", "%", "min"), + ("H. LEV", "%", "max"), + ("H. INFL", "MWh", "EXP"), + ("H. INFL", "MWh", "std"), + ("H. INFL", "MWh", "min"), + ("H. INFL", "MWh", "max"), + ("H. OVFL", "%", "EXP"), + ("H. OVFL", "%", "std"), + ("H. OVFL", "%", "min"), + ("H. OVFL", "%", "max"), + ("H. VAL", "Euro/MWh", "EXP"), + ("H. VAL", "Euro/MWh", "std"), + ("H. VAL", "Euro/MWh", "min"), + ("H. 
VAL", "Euro/MWh", "max"), + ("H. COST", "Euro", "EXP"), + ("H. COST", "Euro", "std"), + ("H. COST", "Euro", "min"), + ("H. COST", "Euro", "max"), + ("PSP_open_injection", "MW", "EXP"), + ("PSP_open_injection", "MW", "std"), + ("PSP_open_injection", "MW", "min"), + ("PSP_open_injection", "MW", "max"), + ("PSP_open_withdrawal", "MW", "EXP"), + ("PSP_open_withdrawal", "MW", "std"), + ("PSP_open_withdrawal", "MW", "min"), + ("PSP_open_withdrawal", "MW", "max"), + ("PSP_open_level", "MWh", "EXP"), + ("PSP_open_level", "MWh", "std"), + ("PSP_open_level", "MWh", "min"), + ("PSP_open_level", "MWh", "max"), + ("PSP_closed_injection", "MW", "EXP"), + ("PSP_closed_injection", "MW", "std"), + ("PSP_closed_injection", "MW", "min"), + ("PSP_closed_injection", "MW", "max"), + ("PSP_closed_withdrawal", "MW", "EXP"), + ("PSP_closed_withdrawal", "MW", "std"), + ("PSP_closed_withdrawal", "MW", "min"), + ("PSP_closed_withdrawal", "MW", "max"), + ("PSP_closed_level", "MWh", "EXP"), + ("PSP_closed_level", "MWh", "std"), + ("PSP_closed_level", "MWh", "min"), + ("PSP_closed_level", "MWh", "max"), + ("Pondage_injection", "MW", "EXP"), + ("Pondage_injection", "MW", "std"), + ("Pondage_injection", "MW", "min"), + ("Pondage_injection", "MW", "max"), + ("Pondage_withdrawal", "MW", "EXP"), + ("Pondage_withdrawal", "MW", "std"), + ("Pondage_withdrawal", "MW", "min"), + ("Pondage_withdrawal", "MW", "max"), + ("Pondage_level", "MWh", "EXP"), + ("Pondage_level", "MWh", "std"), + ("Pondage_level", "MWh", "min"), + ("Pondage_level", "MWh", "max"), + ("Battery_injection", "MW", "EXP"), + ("Battery_injection", "MW", "std"), + ("Battery_injection", "MW", "min"), + ("Battery_injection", "MW", "max"), + ("Battery_withdrawal", "MW", "EXP"), + ("Battery_withdrawal", "MW", "std"), + ("Battery_withdrawal", "MW", "min"), + ("Battery_withdrawal", "MW", "max"), + ("Battery_level", "MWh", "EXP"), + ("Battery_level", "MWh", "std"), + ("Battery_level", "MWh", "min"), + ("Battery_level", "MWh", "max"), + 
("Other1_injection", "MW", "EXP"), + ("Other1_injection", "MW", "std"), + ("Other1_injection", "MW", "min"), + ("Other1_injection", "MW", "max"), + ("Other1_withdrawal", "MW", "EXP"), + ("Other1_withdrawal", "MW", "std"), + ("Other1_withdrawal", "MW", "min"), + ("Other1_withdrawal", "MW", "max"), + ("Other1_level", "MWh", "EXP"), + ("Other1_level", "MWh", "std"), + ("Other1_level", "MWh", "min"), + ("Other1_level", "MWh", "max"), + ("Other2_injection", "MW", "EXP"), + ("Other2_injection", "MW", "std"), + ("Other2_injection", "MW", "min"), + ("Other2_injection", "MW", "max"), + ("Other2_withdrawal", "MW", "EXP"), + ("Other2_withdrawal", "MW", "std"), + ("Other2_withdrawal", "MW", "min"), + ("Other2_withdrawal", "MW", "max"), + ("Other2_level", "MWh", "EXP"), + ("Other2_level", "MWh", "std"), + ("Other2_level", "MWh", "min"), + ("Other2_level", "MWh", "max"), + ("Other3_injection", "MW", "EXP"), + ("Other3_injection", "MW", "std"), + ("Other3_injection", "MW", "min"), + ("Other3_injection", "MW", "max"), + ("Other3_withdrawal", "MW", "EXP"), + ("Other3_withdrawal", "MW", "std"), + ("Other3_withdrawal", "MW", "min"), + ("Other3_withdrawal", "MW", "max"), + ("Other3_level", "MWh", "EXP"), + ("Other3_level", "MWh", "std"), + ("Other3_level", "MWh", "min"), + ("Other3_level", "MWh", "max"), + ("Other4_injection", "MW", "EXP"), + ("Other4_injection", "MW", "std"), + ("Other4_injection", "MW", "min"), + ("Other4_injection", "MW", "max"), + ("Other4_withdrawal", "MW", "EXP"), + ("Other4_withdrawal", "MW", "std"), + ("Other4_withdrawal", "MW", "min"), + ("Other4_withdrawal", "MW", "max"), + ("Other4_level", "MWh", "EXP"), + ("Other4_level", "MWh", "std"), + ("Other4_level", "MWh", "min"), + ("Other4_level", "MWh", "max"), + ("Other5_injection", "MW", "EXP"), + ("Other5_injection", "MW", "std"), + ("Other5_injection", "MW", "min"), + ("Other5_injection", "MW", "max"), + ("Other5_withdrawal", "MW", "EXP"), + ("Other5_withdrawal", "MW", "std"), + ("Other5_withdrawal", "MW", 
"min"), + ("Other5_withdrawal", "MW", "max"), + ("Other5_level", "MWh", "EXP"), + ("Other5_level", "MWh", "std"), + ("Other5_level", "MWh", "min"), + ("Other5_level", "MWh", "max"), + ("UNSP. ENRG", "MWh", "EXP"), + ("UNSP. ENRG", "MWh", "std"), + ("UNSP. ENRG", "MWh", "min"), + ("UNSP. ENRG", "MWh", "max"), + ("SPIL. ENRG", "MWh", "EXP"), + ("SPIL. ENRG", "MWh", "std"), + ("SPIL. ENRG", "MWh", "min"), + ("SPIL. ENRG", "MWh", "max"), + ("LOLD", "Hours", "EXP"), + ("LOLD", "Hours", "std"), + ("LOLD", "Hours", "min"), + ("LOLD", "Hours", "max"), + ("LOLP", "%", "values"), + ("AVL DTG", "MWh", "EXP"), + ("AVL DTG", "MWh", "std"), + ("AVL DTG", "MWh", "min"), + ("AVL DTG", "MWh", "max"), + ("DTG MRG", "MWh", "EXP"), + ("DTG MRG", "MWh", "std"), + ("DTG MRG", "MWh", "min"), + ("DTG MRG", "MWh", "max"), + ("MAX MRG", "MWh", "EXP"), + ("MAX MRG", "MWh", "std"), + ("MAX MRG", "MWh", "min"), + ("MAX MRG", "MWh", "max"), + ("NP COST", "Euro", "EXP"), + ("NP COST", "Euro", "std"), + ("NP COST", "Euro", "min"), + ("NP COST", "Euro", "max"), + ("NODU", " ", "EXP"), + ("NODU", " ", "std"), + ("NODU", " ", "min"), + ("NODU", " ", "max"), + ], + "data": [ + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + -0.0, + 0.0, + -0.0, + -0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + -0.0, + 0.0, + -0.0, + -0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + -0.0, + 0.0, + -0.0, + -0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + -0.0, + 0.0, + -0.0, + -0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + -0.0, + 0.0, + -0.0, + -0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + -0.0, + 0.0, + -0.0, + -0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + 
math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + -0.0, + 0.0, + -0.0, + -0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + -0.0, + 0.0, + -0.0, + -0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + -0.0, + 0.0, + -0.0, + -0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + -0.0, + 0.0, + -0.0, + -0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + -0.0, + 0.0, + -0.0, + -0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + -0.0, + 0.0, + -0.0, + -0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + 0.0, + 0.0, + 0.0, + 0.0, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + math.nan, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + ], + "index": ["01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11", "12"], +} diff --git a/tests/storage/integration/test_STA_mini.py b/tests/storage/integration/test_STA_mini.py index e83cda5847..c7ce7c03f1 100644 --- a/tests/storage/integration/test_STA_mini.py +++ b/tests/storage/integration/test_STA_mini.py @@ -20,6 +20,8 @@ from tests.helpers import assert_study from tests.storage.integration.data.de_details_hourly import de_details_hourly from tests.storage.integration.data.de_fr_values_hourly import de_fr_values_hourly +from tests.storage.integration.data.set_id_annual import set_id_annual +from tests.storage.integration.data.set_values_monthly import set_values_monthly ADMIN = JWTUser( id=1, @@ -308,6 +310,31 @@ def test_sta_mini_input(storage_service, url: str, expected_output: dict): @pytest.mark.parametrize( "url, expected_output", [ + ( + 
"/v1/studies/STA-mini/raw?path=output/20241807-1540eco-extra-outputs/economy/mc-all/binding_constraints/binding-constraints-annual", + { + "columns": [ + ("contrainte (<)", " ", "EXP"), + ("contrainte (<)", " ", "std"), + ("contrainte (<)", " ", "min"), + ("contrainte (<)", " ", "max"), + ], + "index": ["Annual"], + "data": [[0.0, 0.0, 0.0, 0.0]], + }, + ), + ( + "/v1/studies/STA-mini/raw?path=output/20241807-1540eco-extra-outputs/economy/mc-all/areas/@ all areas/values-monthly", + set_values_monthly, + ), + ( + "/v1/studies/STA-mini/raw?path=output/20241807-1540eco-extra-outputs/economy/mc-all/areas/@ all areas/id-annual", + set_id_annual, + ), + ( + "/v1/studies/STA-mini/raw?path=output/20241807-1540eco-extra-outputs/ts-numbers/bindingconstraints/default", + [1], + ), ( "/v1/studies/STA-mini/raw?path=output/20201014-1422eco-hello/annualSystemCost", b"EXP : 185808000\nSTD : 0\nMIN : 185808000\nMAX : 185808000\n", @@ -354,28 +381,11 @@ def test_sta_mini_input(storage_service, url: str, expected_output: dict): ), ( "/v1/studies/STA-mini/raw?path=output/20201014-1422eco-hello/economy/mc-all/links/de/fr", - { - "values-hourly": "matrixfile://values-hourly.txt", - "id-hourly": "matrixfile://id-hourly.txt", - "values-daily": "matrixfile://values-daily.txt", - "id-daily": "matrixfile://id-daily.txt", - "values-weekly": "matrixfile://values-weekly.txt", - "id-weekly": "matrixfile://id-weekly.txt", - "values-monthly": "matrixfile://values-monthly.txt", - "id-monthly": "matrixfile://id-monthly.txt", - "values-annual": "matrixfile://values-annual.txt", - "id-annual": "matrixfile://id-annual.txt", - }, + {}, ), ( "/v1/studies/STA-mini/raw?path=output/20201014-1422eco-hello/economy/mc-ind/00001/links/de/fr", - { - "values-hourly": "matrixfile://values-hourly.txt", - "values-daily": "matrixfile://values-daily.txt", - "values-weekly": "matrixfile://values-weekly.txt", - "values-monthly": "matrixfile://values-monthly.txt", - "values-annual": "matrixfile://values-annual.txt", - }, 
+ {"values-hourly": "matrixfile://values-hourly.txt"}, ), ( "/v1/studies/STA-mini/raw?path=output/20201014-1422eco-hello/economy/mc-ind/00001/links/de/fr/values-hourly", diff --git a/tests/storage/integration/test_exporter.py b/tests/storage/integration/test_exporter.py index 3e4e5666f3..5c273837e4 100644 --- a/tests/storage/integration/test_exporter.py +++ b/tests/storage/integration/test_exporter.py @@ -131,7 +131,7 @@ def test_export_flat( assert export_output_path.exists() files = set(export_output_path.iterdir()) if output_list is None: - assert len(files) == 5 + assert len(files) == 6 elif len(output_list) == 0: assert not files else: diff --git a/tests/storage/repository/filesystem/matrix/input_series_matrix_test.py b/tests/storage/repository/filesystem/matrix/input_series_matrix_test.py deleted file mode 100644 index a422c43f8d..0000000000 --- a/tests/storage/repository/filesystem/matrix/input_series_matrix_test.py +++ /dev/null @@ -1,42 +0,0 @@ -from pathlib import Path -from unittest.mock import Mock - -from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig -from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix - - -def test_get(tmp_path: Path) -> None: - file = tmp_path / "input.txt" - content = """ -100000 100000 0.010000 0.010000 0 0 0 0 -100000 100000 0.010000 0.010000 0 0 0 0 - """ - file.write_text(content) - - config = FileStudyTreeConfig(study_path=file, path=file, version=-1, study_id="id") - node = InputSeriesMatrix(context=Mock(), config=config, nb_columns=8) - - assert node.load() == { - "columns": [0, 1, 2, 3, 4, 5, 6, 7], - "data": [ - [100000.0, 100000.0, 0.01, 0.01, 0.0, 0.0, 0.0, 0.0], - [100000.0, 100000.0, 0.01, 0.01, 0.0, 0.0, 0.0, 0.0], - ], - "index": [0, 1], - } - - -def test_save(tmp_path: Path) -> None: - file = tmp_path / "input.txt" - file.write_text("\n") - - config = FileStudyTreeConfig(study_path=file, path=file, study_id="id", 
version=-1) - node = InputSeriesMatrix(context=Mock(), config=config) - - node.dump({"columns": [0, 1], "data": [[1, 2], [3, 4]], "index": [0, 1]}) - assert ( - file.read_text() - == """1\t2 -3\t4 -""" - ) diff --git a/tests/storage/repository/filesystem/matrix/output_series_matrix_test.py b/tests/storage/repository/filesystem/matrix/output_series_matrix_test.py deleted file mode 100644 index d739e73b0d..0000000000 --- a/tests/storage/repository/filesystem/matrix/output_series_matrix_test.py +++ /dev/null @@ -1,91 +0,0 @@ -from pathlib import Path -from unittest.mock import Mock - -import pandas as pd - -from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig -from antarest.study.storage.rawstudy.model.filesystem.matrix.head_writer import AreaHeadWriter -from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixFrequency -from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import OutputSeriesMatrix - -MATRIX_DAILY_DATA = """\ -DE\tarea\tva\thourly -\tVARIABLES\tBEGIN\tEND -\t2\t1\t2 - -DE\thourly\t\t\t\t01_solar\t02_wind_on -\t\t\t\t\tMWh\tMWh -\tindex\tday\tmonth\thourly\tEXP\tEXP -\t1\t1\tJAN\t00:00\t27000\t600 -\t2\t1\tJAN\t01:00\t48000\t34400 -""" - - -def test_get(tmp_path: Path) -> None: - file = tmp_path / "matrix-daily.txt" - file.write_text("\n\n\n\nmock\tfile\ndummy\tdummy\ndummy\tdummy\ndummy\tdummy") - config = FileStudyTreeConfig(study_path=file, path=file, study_id="id", version=-1) - - serializer = Mock() - serializer.extract_date.return_value = ( - pd.Index(["01/02", "01/01"]), - pd.DataFrame( - data={ - ("01_solar", "MWh", "EXP"): [27000, 48000], - ("02_wind_on", "MWh", "EXP"): [600, 34400], - } - ), - ) - - matrix = pd.DataFrame( - data={ - ("01_solar", "MWh", "EXP"): [27000, 48000], - ("02_wind_on", "MWh", "EXP"): [600, 34400], - }, - index=["01/02", "01/01"], - ) - - node = OutputSeriesMatrix( - context=Mock(), - config=config, - 
freq=MatrixFrequency.DAILY, - date_serializer=serializer, - head_writer=AreaHeadWriter(area="", data_type="", freq=""), - ) - assert node.load() == matrix.to_dict(orient="split") - - -def test_save(tmp_path: Path) -> None: - file = tmp_path / "matrix-daily.txt" - config = FileStudyTreeConfig(study_path=file, path=file, study_id="id", version=-1) - - serializer = Mock() - serializer.build_date.return_value = pd.DataFrame( - { - 0: ["DE", "", "", "", ""], - 1: ["hourly", "", "index", 1, 2], - 2: ["", "", "day", "1", "1"], - 3: ["", "", "month", "JAN", "JAN"], - 4: ["", "", "hourly", "00:00", "01:00"], - } - ) - - node = OutputSeriesMatrix( - context=Mock(), - config=config, - freq=MatrixFrequency.DAILY, - date_serializer=serializer, - head_writer=AreaHeadWriter(area="de", data_type="va", freq="hourly"), - ) - - matrix = pd.DataFrame( - data={ - ("01_solar", "MWh", "EXP"): [27000, 48000], - ("02_wind_on", "MWh", "EXP"): [600, 34400], - }, - index=["01/01", "01/02"], - ) - - node.dump(matrix.to_dict(orient="split")) - actual = file.read_text() - assert actual == MATRIX_DAILY_DATA diff --git a/tests/storage/repository/filesystem/matrix/date_serializer_test.py b/tests/storage/repository/filesystem/matrix/test_date_serializer.py similarity index 100% rename from tests/storage/repository/filesystem/matrix/date_serializer_test.py rename to tests/storage/repository/filesystem/matrix/test_date_serializer.py diff --git a/tests/storage/repository/filesystem/matrix/head_writer_test.py b/tests/storage/repository/filesystem/matrix/test_head_writer.py similarity index 100% rename from tests/storage/repository/filesystem/matrix/head_writer_test.py rename to tests/storage/repository/filesystem/matrix/test_head_writer.py diff --git a/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py b/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py new file mode 100644 index 0000000000..6b7bcbaa01 --- /dev/null +++ 
b/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py @@ -0,0 +1,94 @@ +import textwrap +import typing as t +from pathlib import Path +from unittest.mock import Mock + +import pytest + +from antarest.matrixstore.service import ISimpleMatrixService +from antarest.matrixstore.uri_resolver_service import UriResolverService +from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig +from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer +from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError +from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix + + +class TestInputSeriesMatrix: + @pytest.fixture(name="my_study_config") + def fixture_my_study_config(self, tmp_path: Path) -> FileStudyTreeConfig: + """ + Construct a FileStudyTreeConfig object for a dummy study stored in a temporary directory. + """ + return FileStudyTreeConfig( + study_path=tmp_path, + path=tmp_path / "input.txt", + study_id="df0a8aa9-6c6f-4e8b-a84e-45de2fb29cd3", + version=800, + ) + + def test_load(self, my_study_config: FileStudyTreeConfig) -> None: + file = my_study_config.path + content = textwrap.dedent( + """\ + 100000\t100000\t0.010000\t0.010000\t0\t0\t0\t3.14 + 100000\t100000\t0.010000\t0.010000\t0\t0\t0\t6.28 + """ + ) + file.write_text(content) + + node = InputSeriesMatrix(context=Mock(), config=my_study_config, nb_columns=8) + actual = node.load() + expected = { + "columns": [0, 1, 2, 3, 4, 5, 6, 7], + "data": [ + [100000.0, 100000.0, 0.01, 0.01, 0.0, 0.0, 0.0, 3.14], + [100000.0, 100000.0, 0.01, 0.01, 0.0, 0.0, 0.0, 6.28], + ], + "index": [0, 1], + } + assert actual == expected + + def test_load__file_not_found(self, my_study_config: FileStudyTreeConfig) -> None: + node = InputSeriesMatrix(context=Mock(), config=my_study_config) + with pytest.raises(ChildNotFoundError) as ctx: + node.load() + err_msg = str(ctx.value) + 
assert "input.txt" in err_msg + assert my_study_config.study_id in err_msg + assert "not found" in err_msg.lower() + + def test_load__link_to_matrix(self, my_study_config: FileStudyTreeConfig) -> None: + link = my_study_config.path.with_suffix(".txt.link") + matrix_uri = "matrix://54e252eb14c0440055c82520c338376ff436e1d7ed6cb7283084c89e2e472c42" + matrix_obj = { + "data": [[1, 2], [3, 4]], + "index": [0, 1], + "columns": [0, 1], + } + link.write_text(matrix_uri) + + def resolve(uri: str, formatted: bool = True) -> t.Dict[str, t.Any]: + assert uri == matrix_uri + assert formatted is True + return matrix_obj + + context = ContextServer( + matrix=Mock(spec=ISimpleMatrixService), + resolver=Mock(spec=UriResolverService, resolve=resolve), + ) + + node = InputSeriesMatrix(context=context, config=my_study_config) + actual = node.load() + assert actual == matrix_obj + + def test_save(self, my_study_config: FileStudyTreeConfig) -> None: + node = InputSeriesMatrix(context=Mock(), config=my_study_config) + node.dump({"columns": [0, 1], "data": [[1, 2], [3, 4]], "index": [0, 1]}) + actual = my_study_config.path.read_text() + expected = textwrap.dedent( + """\ + 1\t2 + 3\t4 + """ + ) + assert actual == expected diff --git a/tests/storage/repository/filesystem/matrix/test_output_series_matrix.py b/tests/storage/repository/filesystem/matrix/test_output_series_matrix.py new file mode 100644 index 0000000000..c93f999627 --- /dev/null +++ b/tests/storage/repository/filesystem/matrix/test_output_series_matrix.py @@ -0,0 +1,116 @@ +from pathlib import Path +from unittest.mock import Mock + +import pandas as pd +import pytest + +from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig +from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError +from antarest.study.storage.rawstudy.model.filesystem.matrix.head_writer import AreaHeadWriter +from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import 
MatrixFrequency +from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import OutputSeriesMatrix + +MATRIX_DAILY_DATA = """\ +DE\tarea\tva\thourly +\tVARIABLES\tBEGIN\tEND +\t2\t1\t2 + +DE\thourly\t\t\t\t01_solar\t02_wind_on +\t\t\t\t\tMWh\tMWh +\tindex\tday\tmonth\thourly\tEXP\tEXP +\t1\t1\tJAN\t00:00\t27000\t600 +\t2\t1\tJAN\t01:00\t48000\t34400 +""" + + +class TestOutputSeriesMatrix: + @pytest.fixture(name="my_study_config") + def fixture_my_study_config(self, tmp_path: Path) -> FileStudyTreeConfig: + """ + Construct a FileStudyTreeConfig object for a dummy study stored in a temporary directory. + """ + return FileStudyTreeConfig( + study_path=tmp_path, + path=tmp_path / "matrix-daily.txt", + study_id="df0a8aa9-6c6f-4e8b-a84e-45de2fb29cd3", + version=800, + ) + + def test_load(self, my_study_config: FileStudyTreeConfig) -> None: + file = my_study_config.path + file.write_text("\n\n\n\nmock\tfile\ndummy\tdummy\ndummy\tdummy\ndummy\tdummy") + + serializer = Mock() + serializer.extract_date.return_value = ( + pd.Index(["01/02", "01/01"]), + pd.DataFrame( + data={ + ("01_solar", "MWh", "EXP"): [27000, 48000], + ("02_wind_on", "MWh", "EXP"): [600, 34400], + } + ), + ) + + matrix = pd.DataFrame( + data={ + ("01_solar", "MWh", "EXP"): [27000, 48000], + ("02_wind_on", "MWh", "EXP"): [600, 34400], + }, + index=["01/02", "01/01"], + ) + + node = OutputSeriesMatrix( + context=Mock(), + config=my_study_config, + freq=MatrixFrequency.DAILY, + date_serializer=serializer, + head_writer=AreaHeadWriter(area="", data_type="", freq=""), + ) + assert node.load() == matrix.to_dict(orient="split") + + def test_load__file_not_found(self, my_study_config: FileStudyTreeConfig) -> None: + node = OutputSeriesMatrix( + context=Mock(), + config=my_study_config, + freq=MatrixFrequency.DAILY, + date_serializer=Mock(), + head_writer=AreaHeadWriter(area="", data_type="", freq=""), + ) + with pytest.raises(ChildNotFoundError) as ctx: + node.load() + err_msg = 
str(ctx.value) + assert "'matrix-daily.txt" in err_msg + assert my_study_config.study_id in err_msg + assert "not found" in err_msg.lower() + + def test_save(self, my_study_config: FileStudyTreeConfig) -> None: + serializer = Mock() + serializer.build_date.return_value = pd.DataFrame( + { + 0: ["DE", "", "", "", ""], + 1: ["hourly", "", "index", 1, 2], + 2: ["", "", "day", "1", "1"], + 3: ["", "", "month", "JAN", "JAN"], + 4: ["", "", "hourly", "00:00", "01:00"], + } + ) + + node = OutputSeriesMatrix( + context=Mock(), + config=my_study_config, + freq=MatrixFrequency.DAILY, + date_serializer=serializer, + head_writer=AreaHeadWriter(area="de", data_type="va", freq="hourly"), + ) + + matrix = pd.DataFrame( + data={ + ("01_solar", "MWh", "EXP"): [27000, 48000], + ("02_wind_on", "MWh", "EXP"): [600, 34400], + }, + index=["01/01", "01/02"], + ) + + node.dump(matrix.to_dict(orient="split")) # type: ignore + actual = my_study_config.path.read_text() + assert actual == MATRIX_DAILY_DATA diff --git a/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_area.py b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_area.py index 34be106791..2ff4f6a44a 100644 --- a/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_area.py +++ b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_area.py @@ -1,3 +1,4 @@ +import typing as t import uuid from pathlib import Path from unittest.mock import Mock @@ -12,81 +13,36 @@ from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import AreaOutputSeriesMatrix from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.common import area -# noinspection SpellCheckingInspection -MC_ALL_TRUE = { - "details-annual": {"freq": MatrixFrequency.ANNUAL}, - "details-daily": {"freq": MatrixFrequency.DAILY}, - "details-hourly": {"freq": MatrixFrequency.HOURLY}, - "details-monthly": {"freq": 
MatrixFrequency.MONTHLY}, - "details-res-annual": {"freq": MatrixFrequency.ANNUAL}, - "details-res-daily": {"freq": MatrixFrequency.DAILY}, - "details-res-hourly": {"freq": MatrixFrequency.HOURLY}, - "details-res-monthly": {"freq": MatrixFrequency.MONTHLY}, - "details-res-weekly": {"freq": MatrixFrequency.WEEKLY}, - "details-weekly": {"freq": MatrixFrequency.WEEKLY}, - "id-annual": {"freq": MatrixFrequency.ANNUAL}, - "id-daily": {"freq": MatrixFrequency.DAILY}, - "id-hourly": {"freq": MatrixFrequency.HOURLY}, - "id-monthly": {"freq": MatrixFrequency.MONTHLY}, - "id-weekly": {"freq": MatrixFrequency.WEEKLY}, - "values-annual": {"freq": MatrixFrequency.ANNUAL}, - "values-daily": {"freq": MatrixFrequency.DAILY}, - "values-hourly": {"freq": MatrixFrequency.HOURLY}, - "values-monthly": {"freq": MatrixFrequency.MONTHLY}, - "values-weekly": {"freq": MatrixFrequency.WEEKLY}, -} - -# noinspection SpellCheckingInspection -MC_ALL_FALSE = { - "details-annual": {"freq": MatrixFrequency.ANNUAL}, - "details-daily": {"freq": MatrixFrequency.DAILY}, - "details-hourly": {"freq": MatrixFrequency.HOURLY}, - "details-monthly": {"freq": MatrixFrequency.MONTHLY}, - "details-res-annual": {"freq": MatrixFrequency.ANNUAL}, - "details-res-daily": {"freq": MatrixFrequency.DAILY}, - "details-res-hourly": {"freq": MatrixFrequency.HOURLY}, - "details-res-monthly": {"freq": MatrixFrequency.MONTHLY}, - "details-res-weekly": {"freq": MatrixFrequency.WEEKLY}, - "details-weekly": {"freq": MatrixFrequency.WEEKLY}, - "values-annual": {"freq": MatrixFrequency.ANNUAL}, - "values-daily": {"freq": MatrixFrequency.DAILY}, - "values-hourly": {"freq": MatrixFrequency.HOURLY}, - "values-monthly": {"freq": MatrixFrequency.MONTHLY}, - "values-weekly": {"freq": MatrixFrequency.WEEKLY}, -} - class TestOutputSimulationAreaItem: @pytest.mark.parametrize( - "mc_all, expected", + "existing_files", [ - pytest.param(True, MC_ALL_TRUE, id="mc-all-True"), - pytest.param(False, MC_ALL_FALSE, id="mc-all-False"), + 
pytest.param(["details-annual.txt"]), + pytest.param(["details-res-hourly.txt", "values-monthly.txt", "details-STstorage-daily.txt"]), + pytest.param([]), ], ) - def test_build_output_simulation_area_item( - self, - mc_all: bool, - expected: dict, - ): + def test_build_output_simulation_area_item(self, existing_files: t.List[str], tmp_path: Path): + expected = {} + for file in existing_files: + tmp_path.joinpath(file).touch() + name = Path(file).stem + splitted = name.split("-") + expected[name] = {"freq": MatrixFrequency(splitted[len(splitted) - 1])} matrix = Mock(spec=ISimpleMatrixService) resolver = Mock(spec=UriResolverService) context = ContextServer(matrix=matrix, resolver=resolver) study_id = str(uuid.uuid4()) config = FileStudyTreeConfig( study_path=Path("path/to/study"), - path=Path("path/to/study"), + path=tmp_path, study_id=study_id, version=850, # will become a `str` in the future areas={}, ) - node = area.OutputSimulationAreaItem( - context=context, - config=config, - area="fr", - mc_all=mc_all, - ) + node = area.OutputSimulationAreaItem(context=context, config=config, area="fr") actual = node.build() # check the result @@ -96,25 +52,15 @@ def test_build_output_simulation_area_item( new_config = FileStudyTreeConfig( study_path=Path("path/to/study"), - path=Path("path/to/study"), + path=tmp_path, study_id=study_id, version=860, # will become a `str` in the future areas={}, ) - new_node = area.OutputSimulationAreaItem( - context=context, - config=new_config, - area="fr", - mc_all=mc_all, - ) + new_node = area.OutputSimulationAreaItem(context=context, config=new_config, area="fr") new_actual = new_node.build() # check the result actual_obj = {key: {"freq": value.freq} for key, value in new_actual.items()} - expected["details-STstorage-annual"] = {"freq": MatrixFrequency.ANNUAL} - expected["details-STstorage-daily"] = {"freq": MatrixFrequency.DAILY} - expected["details-STstorage-hourly"] = {"freq": MatrixFrequency.HOURLY} - 
expected["details-STstorage-monthly"] = {"freq": MatrixFrequency.MONTHLY} - expected["details-STstorage-weekly"] = {"freq": MatrixFrequency.WEEKLY} assert actual_obj == expected diff --git a/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_binding_const.py b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_binding_const.py index 0b3161d193..248468d5de 100644 --- a/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_binding_const.py +++ b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_binding_const.py @@ -1,3 +1,4 @@ +import typing as t import uuid from pathlib import Path from unittest.mock import Mock @@ -14,44 +15,34 @@ ) from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.common import binding_const -# noinspection SpellCheckingInspection -NOMINAL_CASE = { - "binding-constraints-annual": {"freq": MatrixFrequency.ANNUAL}, - "binding-constraints-daily": {"freq": MatrixFrequency.DAILY}, - "binding-constraints-hourly": {"freq": MatrixFrequency.HOURLY}, - "binding-constraints-monthly": {"freq": MatrixFrequency.MONTHLY}, - "binding-constraints-weekly": {"freq": MatrixFrequency.WEEKLY}, -} - class TestOutputSimulationBindingConstraintItem: @pytest.mark.parametrize( - "expected", + "existing_files", [ - pytest.param(NOMINAL_CASE, id="nominal-case-True"), + pytest.param(["binding-constraints-hourly.txt", "binding-constraints-daily.txt"]), + pytest.param([]), ], ) - def test_build_output_simulation_binding_constraint_item( - self, - expected: dict, - ): + def test_build_output_simulation_binding_constraint_item(self, existing_files: t.List[str], tmp_path: Path): + expected = {} + for file in existing_files: + tmp_path.joinpath(file).touch() + name = Path(file).stem + expected[name] = {"freq": MatrixFrequency(name.replace("binding-constraints-", ""))} matrix = Mock(spec=ISimpleMatrixService) resolver = Mock(spec=UriResolverService) 
context = ContextServer(matrix=matrix, resolver=resolver) study_id = str(uuid.uuid4()) config = FileStudyTreeConfig( study_path=Path("path/to/study"), - path=Path("path/to/study"), + path=tmp_path, study_id=study_id, version=850, # will become a `str` in the future areas={}, ) - node = binding_const.OutputSimulationBindingConstraintItem( - context=context, - config=config, - children_glob_exceptions=None, - ) + node = binding_const.OutputSimulationBindingConstraintItem(context=context, config=config) actual = node.build() # check the result diff --git a/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_link.py b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_link.py index 3c7dcdb1d2..765187a186 100644 --- a/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_link.py +++ b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_link.py @@ -1,3 +1,4 @@ +import typing as t import uuid from pathlib import Path from unittest.mock import Mock @@ -12,62 +13,34 @@ from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import LinkOutputSeriesMatrix from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.common import link -# noinspection SpellCheckingInspection -MC_ALL_TRUE = { - "id-annual": {"freq": MatrixFrequency.ANNUAL}, - "id-daily": {"freq": MatrixFrequency.DAILY}, - "id-hourly": {"freq": MatrixFrequency.HOURLY}, - "id-monthly": {"freq": MatrixFrequency.MONTHLY}, - "id-weekly": {"freq": MatrixFrequency.WEEKLY}, - "values-annual": {"freq": MatrixFrequency.ANNUAL}, - "values-daily": {"freq": MatrixFrequency.DAILY}, - "values-hourly": {"freq": MatrixFrequency.HOURLY}, - "values-monthly": {"freq": MatrixFrequency.MONTHLY}, - "values-weekly": {"freq": MatrixFrequency.WEEKLY}, -} - -# noinspection SpellCheckingInspection -MC_ALL_FALSE = { - "values-annual": {"freq": MatrixFrequency.ANNUAL}, - "values-daily": {"freq": 
MatrixFrequency.DAILY}, - "values-hourly": {"freq": MatrixFrequency.HOURLY}, - "values-monthly": {"freq": MatrixFrequency.MONTHLY}, - "values-weekly": {"freq": MatrixFrequency.WEEKLY}, -} - class TestOutputSimulationLinkItem: @pytest.mark.parametrize( - "mc_all, expected", + "existing_files", [ - pytest.param(True, MC_ALL_TRUE, id="mc-all-True"), - pytest.param(False, MC_ALL_FALSE, id="mc-all-False"), + pytest.param(["id-hourly.txt"]), + pytest.param(["values-monthly.txt", "id-annual.txt"]), ], ) - def test_build_output_simulation_link_item( - self, - mc_all: bool, - expected: dict, - ): + def test_build_output_simulation_link_item(self, existing_files: t.List[str], tmp_path: Path): + expected = {} + for file in existing_files: + tmp_path.joinpath(file).touch() + name = Path(file).stem + expected[name] = {"freq": MatrixFrequency(name.split("-")[1])} matrix = Mock(spec=ISimpleMatrixService) resolver = Mock(spec=UriResolverService) context = ContextServer(matrix=matrix, resolver=resolver) study_id = str(uuid.uuid4()) config = FileStudyTreeConfig( study_path=Path("path/to/study"), - path=Path("path/to/study"), + path=tmp_path, study_id=study_id, version=850, # will become a `str` in the future areas={}, ) - node = link.OutputSimulationLinkItem( - context=context, - config=config, - area="fr", - link="fr -> de", - mc_all=mc_all, - ) + node = link.OutputSimulationLinkItem(context=context, config=config, area="fr", link="fr -> de") actual = node.build() # check the result diff --git a/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_set.py b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_set.py index c56f1dc8ff..0213647a8e 100644 --- a/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_set.py +++ b/tests/storage/repository/filesystem/root/output/simulation/mode/common/test_set.py @@ -1,3 +1,4 @@ +import typing as t import uuid from pathlib import Path from unittest.mock import Mock @@ -12,61 
+13,34 @@ from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import AreaOutputSeriesMatrix from antarest.study.storage.rawstudy.model.filesystem.root.output.simulation.mode.common import set -# noinspection SpellCheckingInspection -MC_ALL_TRUE = { - "id-annual": {"freq": MatrixFrequency.ANNUAL}, - "id-daily": {"freq": MatrixFrequency.DAILY}, - "id-hourly": {"freq": MatrixFrequency.HOURLY}, - "id-monthly": {"freq": MatrixFrequency.MONTHLY}, - "id-weekly": {"freq": MatrixFrequency.WEEKLY}, - "values-annual": {"freq": MatrixFrequency.ANNUAL}, - "values-daily": {"freq": MatrixFrequency.DAILY}, - "values-hourly": {"freq": MatrixFrequency.HOURLY}, - "values-monthly": {"freq": MatrixFrequency.MONTHLY}, - "values-weekly": {"freq": MatrixFrequency.WEEKLY}, -} - -# noinspection SpellCheckingInspection -MC_ALL_FALSE = { - "values-annual": {"freq": MatrixFrequency.ANNUAL}, - "values-daily": {"freq": MatrixFrequency.DAILY}, - "values-hourly": {"freq": MatrixFrequency.HOURLY}, - "values-monthly": {"freq": MatrixFrequency.MONTHLY}, - "values-weekly": {"freq": MatrixFrequency.WEEKLY}, -} - class TestOutputSimulationSet: @pytest.mark.parametrize( - "mc_all, expected", + "existing_files", [ - pytest.param(True, MC_ALL_TRUE, id="mc-all-True"), - pytest.param(False, MC_ALL_FALSE, id="mc-all-False"), + pytest.param(["id-hourly.txt", "values-annual.txt"]), + pytest.param([]), ], ) - def test_output_simulation_set( - self, - mc_all: bool, - expected: dict, - ): + def test_output_simulation_set(self, existing_files: t.List[str], tmp_path: Path): + expected = {} + for file in existing_files: + tmp_path.joinpath(file).touch() + name = Path(file).stem + expected[name] = {"freq": MatrixFrequency(name.split("-")[1])} matrix = Mock(spec=ISimpleMatrixService) resolver = Mock(spec=UriResolverService) context = ContextServer(matrix=matrix, resolver=resolver) study_id = str(uuid.uuid4()) config = FileStudyTreeConfig( - study_path=Path("path/to/study"), - 
path=Path("path/to/study"), + study_path=Path("study_path"), + path=tmp_path, study_id=study_id, version=850, # will become a `str` in the future areas={}, ) - node = set.OutputSimulationSet( - context=context, - config=config, - set="foo", - mc_all=mc_all, - ) + node = set.OutputSimulationSet(context=context, config=config, set="foo") actual = node.build() # check the result diff --git a/tests/storage/repository/filesystem/utils.py b/tests/storage/repository/filesystem/utils.py index abef9e26e5..cb563e8567 100644 --- a/tests/storage/repository/filesystem/utils.py +++ b/tests/storage/repository/filesystem/utils.py @@ -54,7 +54,7 @@ def __init__( config: FileStudyTreeConfig, children: TREE, ): - FolderNode.__init__(self, context, config) + super().__init__(context, config) self.children = children def build(self) -> TREE: diff --git a/webapp/package-lock.json b/webapp/package-lock.json index 3b2f24d6c6..15225c0021 100644 --- a/webapp/package-lock.json +++ b/webapp/package-lock.json @@ -1,15 +1,16 @@ { "name": "antares-web", - "version": "2.17.2", + "version": "2.17.3", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "antares-web", - "version": "2.17.2", + "version": "2.17.3", "dependencies": { "@emotion/react": "11.11.1", "@emotion/styled": "11.11.0", + "@glideapps/glide-data-grid": "6.0.3", "@handsontable/react": "14.1.0", "@mui/icons-material": "5.14.11", "@mui/lab": "5.0.0-alpha.146", @@ -65,6 +66,9 @@ "xml-js": "1.6.11" }, "devDependencies": { + "@testing-library/jest-dom": "6.4.6", + "@testing-library/react": "16.0.0", + "@testing-library/user-event": "14.5.2", "@total-typescript/ts-reset": "0.5.1", "@types/d3": "5.16.0", "@types/debug": "4.1.9", @@ -86,10 +90,13 @@ "@types/react-window": "1.8.6", "@types/redux-logger": "3.0.10", "@types/swagger-ui-react": "4.18.1", + "@types/testing-library__jest-dom": "6.0.0", "@types/uuid": "9.0.4", "@typescript-eslint/eslint-plugin": "6.14.0", "@typescript-eslint/parser": "6.14.0", 
"@vitejs/plugin-react-swc": "3.5.0", + "@vitest/coverage-v8": "1.6.0", + "@vitest/ui": "1.6.0", "eslint": "8.55.0", "eslint-config-prettier": "9.0.0", "eslint-plugin-jsdoc": "48.2.0", @@ -98,9 +105,11 @@ "eslint-plugin-react-hooks": "4.6.0", "eslint-plugin-react-refresh": "0.4.5", "husky": "8.0.3", + "jsdom": "24.1.0", "prettier": "3.0.3", "typescript": "5.2.2", - "vite": "5.0.8" + "vite": "5.0.8", + "vitest": "1.6.0" }, "engines": { "node": "18.16.1" @@ -115,117 +124,276 @@ "node": ">=0.10.0" } }, + "node_modules/@adobe/css-tools": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.4.0.tgz", + "integrity": "sha512-Ff9+ksdQQB3rMncgqDK78uLznstjyfIf2Arnh22pW8kBpLs6rpKDwgnZT46hin5Hl1WzazzK64DOrhSwYpS7bQ==", + "dev": true + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, "node_modules/@babel/code-frame": { - "version": "7.23.5", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.23.5.tgz", - "integrity": "sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", + "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", "dependencies": { - "@babel/highlight": "^7.23.4", - "chalk": "^2.4.2" + "@babel/highlight": "^7.24.7", + "picocolors": "^1.0.0" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/code-frame/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "node_modules/@babel/compat-data": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.7.tgz", + "integrity": "sha512-qJzAIcv03PyaWqxRgO4mSU3lihncDT296vnyuE2O8uA4w3UHWI4S3hgeZd1L8W1Bft40w9JxJ2b412iDUFFRhw==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.24.7.tgz", + "integrity": "sha512-nykK+LEK86ahTkX/3TgauT0ikKoNCfKHEaZYTUVupJdTLzGNvrblu4u6fa7DhZONAltdf8e662t/abY8idrd/g==", + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.24.7", + "@babel/generator": "^7.24.7", + "@babel/helper-compilation-targets": "^7.24.7", + "@babel/helper-module-transforms": "^7.24.7", + "@babel/helpers": "^7.24.7", + "@babel/parser": "^7.24.7", + "@babel/template": "^7.24.7", + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" + }, + "node_modules/@babel/core/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "bin": { + "semver": "bin/semver.js" + } + }, + 
"node_modules/@babel/generator": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.24.7.tgz", + "integrity": "sha512-oipXieGC3i45Y1A41t4tAqpnEZWgB/lC6Ehh6+rOviR5XWpTtMmLN+fGjz9vOiNRt0p6RtO6DtD0pdU3vpqdSA==", "dependencies": { - "color-convert": "^1.9.0" + "@babel/types": "^7.24.7", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^2.5.1" }, "engines": { - "node": ">=4" + "node": ">=6.9.0" } }, - "node_modules/@babel/code-frame/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "node_modules/@babel/helper-compilation-targets": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.24.7.tgz", + "integrity": "sha512-ctSdRHBi20qWOfy27RUb4Fhp07KSJ3sXcuSvTrXrc4aG8NSYDo1ici3Vhg9bg69y5bj0Mr1lh0aeEgTvc12rMg==", "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" + "@babel/compat-data": "^7.24.7", + "@babel/helper-validator-option": "^7.24.7", + "browserslist": "^4.22.2", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" }, "engines": { - "node": ">=4" + "node": ">=6.9.0" } }, - "node_modules/@babel/code-frame/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "dependencies": { - "color-name": "1.1.3" 
+ "yallist": "^3.0.2" } }, - "node_modules/@babel/code-frame/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" + "node_modules/@babel/helper-compilation-targets/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "bin": { + "semver": "bin/semver.js" + } }, - "node_modules/@babel/code-frame/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "node_modules/@babel/helper-compilation-targets/node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" + }, + "node_modules/@babel/helper-environment-visitor": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.24.7.tgz", + "integrity": "sha512-DoiN84+4Gnd0ncbBOM9AZENV4a5ZiL39HYMyZJGZ/AZEykHYdJw0wW3kdcsh9/Kn+BRXHLkkklZ51ecPKmI1CQ==", + "dependencies": { + "@babel/types": "^7.24.7" + }, "engines": { - "node": ">=0.8.0" + "node": ">=6.9.0" } }, - "node_modules/@babel/code-frame/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "node_modules/@babel/helper-function-name": { + "version": "7.24.7", + "resolved": 
"https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.24.7.tgz", + "integrity": "sha512-FyoJTsj/PEUWu1/TYRiXTIHc8lbw+TDYkZuoE43opPS5TrI7MyONBE1oNvfguEXAD9yhQRrVBnXdXzSLQl9XnA==", + "dependencies": { + "@babel/template": "^7.24.7", + "@babel/types": "^7.24.7" + }, "engines": { - "node": ">=4" + "node": ">=6.9.0" } }, - "node_modules/@babel/code-frame/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "node_modules/@babel/helper-hoist-variables": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.24.7.tgz", + "integrity": "sha512-MJJwhkoGy5c4ehfoRyrJ/owKeMl19U54h27YYftT0o2teQ3FJ3nQUf/I3LlJsX4l3qlw7WRXUmiyajvHXoTubQ==", "dependencies": { - "has-flag": "^3.0.0" + "@babel/types": "^7.24.7" }, "engines": { - "node": ">=4" + "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-imports": { - "version": "7.22.15", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz", - "integrity": "sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz", + "integrity": "sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA==", + "dependencies": { + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.24.7.tgz", + "integrity": 
"sha512-1fuJEwIrp+97rM4RWdO+qrRsZlAeL1lQJoPqtCYWv0NL115XM93hIH4CSRln2w52SqvmY5hqdtauB6QFCDiZNQ==", "dependencies": { - "@babel/types": "^7.22.15" + "@babel/helper-environment-visitor": "^7.24.7", + "@babel/helper-module-imports": "^7.24.7", + "@babel/helper-simple-access": "^7.24.7", + "@babel/helper-split-export-declaration": "^7.24.7", + "@babel/helper-validator-identifier": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.7.tgz", + "integrity": "sha512-Rq76wjt7yz9AAc1KnlRKNAi/dMSVWgDRx43FHoJEbcYU6xOWaE2dVPwcdTukJrjxS65GITyfbvEYHvkirZ6uEg==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-simple-access": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz", + "integrity": "sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg==", + "dependencies": { + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-split-export-declaration": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.7.tgz", + "integrity": "sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA==", + "dependencies": { + "@babel/types": "^7.24.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.23.4", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz", - "integrity": "sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==", + "version": "7.24.7", + "resolved": 
"https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.7.tgz", + "integrity": "sha512-7MbVt6xrwFQbunH2DNQsAP5sTGxfqQtErvBIvIMi6EQnbgUOuVYanvREcmFrOPhoXBrTtjhhP+lW+o5UfK+tDg==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", - "integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", + "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.7.tgz", + "integrity": "sha512-yy1/KvjhV/ZCL+SM7hBrvnZJ3ZuT9OuZgIJAGpPEToANvc3iM6iDvBnRjtElWibHU6n8/LPR/EjX9EtIEYO3pw==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.7.tgz", + "integrity": "sha512-NlmJJtvcw72yRJRcnCmGvSi+3jDEg8qFu3z0AFoymmzLx5ERVWyzd9kVXr7Th9/8yIJi2Zc6av4Tqz3wFs8QWg==", + "dependencies": { + "@babel/template": "^7.24.7", + "@babel/types": "^7.24.7" + }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/highlight": { - "version": "7.23.4", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.23.4.tgz", - "integrity": "sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz", + "integrity": 
"sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==", "dependencies": { - "@babel/helper-validator-identifier": "^7.22.20", + "@babel/helper-validator-identifier": "^7.24.7", "chalk": "^2.4.2", - "js-tokens": "^4.0.0" + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" }, "engines": { "node": ">=6.9.0" @@ -295,6 +463,71 @@ "node": ">=4" } }, + "node_modules/@babel/parser": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.7.tgz", + "integrity": "sha512-9uUYRm6OqQrCqQdG1iCBwBPZgN8ciDBro2nIOFaiRz1/BCxaI7CNvQbDHvsArAC7Tw9Hda/B3U+6ui9u4HWXPw==", + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-proposal-export-namespace-from": { + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz", + "integrity": "sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA==", + "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. 
Please use @babel/plugin-transform-export-namespace-from instead.", + "dependencies": { + "@babel/helper-plugin-utils": "^7.18.9", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-dynamic-import": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", + "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-export-namespace-from": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz", + "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.3" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-commonjs": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.7.tgz", + "integrity": "sha512-iFI8GDxtevHJ/Z22J5xQpVqFLlMNstcLXh994xifFwxxGslr2ZXXLWgtBeLctOD63UFDArdvN6Tg8RFw+aEmjQ==", + "dependencies": { + "@babel/helper-module-transforms": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/helper-simple-access": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, "node_modules/@babel/runtime": { "version": "7.23.8", "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.8.tgz", @@ -318,19 +551,66 @@ "node": ">=6.9.0" } }, + "node_modules/@babel/template": { + 
"version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.7.tgz", + "integrity": "sha512-jYqfPrU9JTF0PmPy1tLYHW4Mp4KlgxJD9l2nP9fD6yT/ICi554DmrWBAEYpIelzjHf1msDP3PxJIRt/nFNfBig==", + "dependencies": { + "@babel/code-frame": "^7.24.7", + "@babel/parser": "^7.24.7", + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.7.tgz", + "integrity": "sha512-yb65Ed5S/QAcewNPh0nZczy9JdYXkkAbIsEo+P7BE7yO3txAY30Y/oPa3QkQ5It3xVG2kpKMg9MsdxZaO31uKA==", + "dependencies": { + "@babel/code-frame": "^7.24.7", + "@babel/generator": "^7.24.7", + "@babel/helper-environment-visitor": "^7.24.7", + "@babel/helper-function-name": "^7.24.7", + "@babel/helper-hoist-variables": "^7.24.7", + "@babel/helper-split-export-declaration": "^7.24.7", + "@babel/parser": "^7.24.7", + "@babel/types": "^7.24.7", + "debug": "^4.3.1", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse/node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "engines": { + "node": ">=4" + } + }, "node_modules/@babel/types": { - "version": "7.23.6", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.6.tgz", - "integrity": "sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.7.tgz", + "integrity": "sha512-XEFXSlxiG5td2EJRe8vOmRbaXVgfcBlszKujvVmWIK/UpywWljQCfzAv3RQCGujWQ1RD4YYWEAqDXfuJiy8f5Q==", "dependencies": { - "@babel/helper-string-parser": "^7.23.4", - "@babel/helper-validator-identifier": "^7.22.20", + "@babel/helper-string-parser": "^7.24.7", + 
"@babel/helper-validator-identifier": "^7.24.7", "to-fast-properties": "^2.0.0" }, "engines": { "node": ">=6.9.0" } }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true + }, "node_modules/@braintree/sanitize-url": { "version": "6.0.4", "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-6.0.4.tgz", @@ -960,6 +1240,23 @@ "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.1.tgz", "integrity": "sha512-9TANp6GPoMtYzQdt54kfAyMmz1+osLlXdg2ENroU7zzrtflTLrrC/lgrIfaSe+Wu0b89GKccT7vxXA0MoAIO+Q==" }, + "node_modules/@glideapps/glide-data-grid": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/@glideapps/glide-data-grid/-/glide-data-grid-6.0.3.tgz", + "integrity": "sha512-YXKggiNOaEemf0jP0jORq2EQKz+zXms+6mGzZc+q0mLMjmgzzoGLOQC1uYcynXSj1R61bd27JcPFsoH+Gj37Vg==", + "dependencies": { + "@linaria/react": "^4.5.3", + "canvas-hypertxt": "^1.0.3", + "react-number-format": "^5.0.0" + }, + "peerDependencies": { + "lodash": "^4.17.19", + "marked": "^4.0.10", + "react": "^16.12.0 || 17.x || 18.x", + "react-dom": "^16.12.0 || 17.x || 18.x", + "react-responsive-carousel": "^3.2.7" + } + }, "node_modules/@handsontable/pikaday": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/@handsontable/pikaday/-/pikaday-1.0.0.tgz", @@ -1014,11 +1311,194 @@ "react": "*" } }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": 
"https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "engines": { + "node": ">=6.0.0" + } + }, "node_modules/@jridgewell/sourcemap-codec": { "version": "1.4.15", "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==" }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dependencies": { + 
"@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@linaria/core": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/@linaria/core/-/core-4.5.4.tgz", + "integrity": "sha512-vMs/5iU0stxjfbBCxobIgY+wSQx4G8ukNwrhjPVD+6bF9QrTwi5rl0mKaCMxaGMjnfsLRiiM3i+hnWLIEYLdSg==", + "dependencies": { + "@linaria/logger": "^4.5.0", + "@linaria/tags": "^4.5.4", + "@linaria/utils": "^4.5.3" + }, + "engines": { + "node": "^12.16.0 || >=13.7.0" + } + }, + "node_modules/@linaria/logger": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/@linaria/logger/-/logger-4.5.0.tgz", + "integrity": "sha512-XdQLk242Cpcsc9a3Cz1ktOE5ysTo2TpxdeFQEPwMm8Z/+F/S6ZxBDdHYJL09srXWz3hkJr3oS2FPuMZNH1HIxw==", + "dependencies": { + "debug": "^4.1.1", + "picocolors": "^1.0.0" + }, + "engines": { + "node": "^12.16.0 || >=13.7.0" + } + }, + "node_modules/@linaria/react": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/@linaria/react/-/react-4.5.4.tgz", + "integrity": "sha512-/dhCVCsfdGPfQCPV0q5yy+DDlFXepvfXrw/os2fC+Xo1v9J/9gyiaBBWHzcumauvNNFj8aN6vRkj89fMujPHew==", + "dependencies": { + "@emotion/is-prop-valid": "^1.2.0", + "@linaria/core": "^4.5.4", + "@linaria/tags": "^4.5.4", + "@linaria/utils": "^4.5.3", + "minimatch": "^9.0.3", + "react-html-attributes": "^1.4.6", + "ts-invariant": "^0.10.3" + }, + "engines": { + "node": "^12.16.0 || >=13.7.0" + }, + "peerDependencies": { + "react": ">=16" + } + }, + "node_modules/@linaria/react/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@linaria/react/node_modules/minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": 
"sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@linaria/tags": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/@linaria/tags/-/tags-4.5.4.tgz", + "integrity": "sha512-HPxLB6HlJWLi6o8+8lTLegOmDnbMbuzEE+zzunaPZEGSoIIYx8HAv5VbY/sG/zNyxDElk6laiAwEVWN8h5/zxg==", + "dependencies": { + "@babel/generator": "^7.22.9", + "@linaria/logger": "^4.5.0", + "@linaria/utils": "^4.5.3" + }, + "engines": { + "node": "^12.16.0 || >=13.7.0" + } + }, + "node_modules/@linaria/utils": { + "version": "4.5.3", + "resolved": "https://registry.npmjs.org/@linaria/utils/-/utils-4.5.3.tgz", + "integrity": "sha512-tSpxA3Zn0DKJ2n/YBnYAgiDY+MNvkmzAHrD8R9PKrpGaZ+wz1jQEmE1vGn1cqh8dJyWK0NzPAA8sf1cqa+RmAg==", + "dependencies": { + "@babel/core": "^7.22.9", + "@babel/generator": "^7.22.9", + "@babel/plugin-proposal-export-namespace-from": "^7.18.9", + "@babel/plugin-syntax-dynamic-import": "^7.8.3", + "@babel/plugin-transform-modules-commonjs": "^7.22.5", + "@babel/template": "^7.22.5", + "@babel/traverse": "^7.22.8", + "@babel/types": "^7.22.5", + "@linaria/logger": "^4.5.0", + "babel-merge": "^3.0.0", + "find-up": "^5.0.0", + "minimatch": "^9.0.3" + }, + "engines": { + "node": "^12.16.0 || >=13.7.0" + } + }, + "node_modules/@linaria/utils/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@linaria/utils/node_modules/minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": 
"sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/@mapbox/geojson-rewind": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/@mapbox/geojson-rewind/-/geojson-rewind-0.5.2.tgz", @@ -1600,6 +2080,12 @@ "pick-by-alias": "^1.2.0" } }, + "node_modules/@polka/url": { + "version": "1.0.0-next.25", + "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.25.tgz", + "integrity": "sha512-j7P6Rgr3mmtdkeDGTe0E/aYyWEWVtc5yFXtHCRHs28/jptDEWfaVOc5T7cblqy1XKPPfCxJc/8DwQ5YgLOZOVQ==", + "dev": true + }, "node_modules/@popperjs/core": { "version": "2.11.8", "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", @@ -1810,6 +2296,12 @@ "win32" ] }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true + }, "node_modules/@sphinxxxx/color-conversion": { "version": "2.2.2", "resolved": "https://registry.npmjs.org/@sphinxxxx/color-conversion/-/color-conversion-2.2.2.tgz", @@ -3060,6 +3552,165 @@ "url": "https://github.com/sponsors/tannerlinsley" } }, + "node_modules/@testing-library/dom": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.1.0.tgz", + "integrity": "sha512-wdsYKy5zupPyLCW2Je5DLHSxSfbIp6h80WoHOQc+RPtmPGA52O9x5MJEkv92Sjonpq+poOAtUKhh1kBGAXBrNA==", + "dev": true, + "peer": true, + "dependencies": { + "@babel/code-frame": "^7.10.4", + "@babel/runtime": "^7.12.5", + "@types/aria-query": "^5.0.1", + "aria-query": "5.3.0", + "chalk": "^4.1.0", + "dom-accessibility-api": "^0.5.9", + "lz-string": "^1.5.0", + "pretty-format": "^27.0.2" + }, + 
"engines": { + "node": ">=18" + } + }, + "node_modules/@testing-library/dom/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "peer": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@testing-library/dom/node_modules/pretty-format": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", + "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", + "dev": true, + "peer": true, + "dependencies": { + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", + "react-is": "^17.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/@testing-library/dom/node_modules/react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "dev": true, + "peer": true + }, + "node_modules/@testing-library/jest-dom": { + "version": "6.4.6", + "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.4.6.tgz", + "integrity": "sha512-8qpnGVincVDLEcQXWaHOf6zmlbwTKc6Us6PPu4CRnPXCzo2OGBS5cwgMMOWdxDpEz1mkbvXHpEy99M5Yvt682w==", + "dev": true, + "dependencies": { + "@adobe/css-tools": "^4.4.0", + "@babel/runtime": "^7.9.2", + "aria-query": "^5.0.0", + "chalk": "^3.0.0", + "css.escape": "^1.5.1", + "dom-accessibility-api": "^0.6.3", + "lodash": "^4.17.21", + "redent": "^3.0.0" + }, + "engines": { + "node": ">=14", + "npm": ">=6", + "yarn": ">=1" + }, + "peerDependencies": { + "@jest/globals": ">= 28", + "@types/bun": "latest", + "@types/jest": ">= 28", + "jest": 
">= 28", + "vitest": ">= 0.32" + }, + "peerDependenciesMeta": { + "@jest/globals": { + "optional": true + }, + "@types/bun": { + "optional": true + }, + "@types/jest": { + "optional": true + }, + "jest": { + "optional": true + }, + "vitest": { + "optional": true + } + } + }, + "node_modules/@testing-library/jest-dom/node_modules/chalk": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", + "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@testing-library/jest-dom/node_modules/dom-accessibility-api": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.6.3.tgz", + "integrity": "sha512-7ZgogeTnjuHbo+ct10G9Ffp0mif17idi0IyWNVA/wcwcm7NPOD/WEHVP3n7n3MhXqxoIYm8d6MuZohYWIZ4T3w==", + "dev": true + }, + "node_modules/@testing-library/react": { + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.0.0.tgz", + "integrity": "sha512-guuxUKRWQ+FgNX0h0NS0FIq3Q3uLtWVpBzcLOggmfMoUpgBnzBzvLLd4fbm6yS8ydJd94cIfY4yP9qUQjM2KwQ==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.12.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@testing-library/dom": "^10.0.0", + "@types/react": "^18.0.0", + "@types/react-dom": "^18.0.0", + "react": "^18.0.0", + "react-dom": "^18.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@testing-library/user-event": { + "version": "14.5.2", + "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-14.5.2.tgz", + "integrity": "sha512-YAh82Wh4TIrxYLmfGcixwD18oIjyC1pFQC2Y01F2lzV2HTMiYrI0nze0FD0ocB//CKS/7jIUgae+adPqxK5yCQ==", + "dev": true, + 
"engines": { + "node": ">=12", + "npm": ">=6" + }, + "peerDependencies": { + "@testing-library/dom": ">=7.21.4" + } + }, "node_modules/@total-typescript/ts-reset": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/@total-typescript/ts-reset/-/ts-reset-0.5.1.tgz", @@ -3127,6 +3778,13 @@ "integrity": "sha512-YPF9S7fzpuyrxru+sG/rrTpZkC6gpHBPF14W3x70kqVOD+ks6jkYLapk4yceh36xej7K4HYxcyz9ZDQ2lTvwgQ==", "dev": true }, + "node_modules/@types/aria-query": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.4.tgz", + "integrity": "sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==", + "dev": true, + "peer": true + }, "node_modules/@types/d3": { "version": "5.16.0", "resolved": "https://registry.npmjs.org/@types/d3/-/d3-5.16.0.tgz", @@ -3659,6 +4317,16 @@ "@types/react": "*" } }, + "node_modules/@types/testing-library__jest-dom": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@types/testing-library__jest-dom/-/testing-library__jest-dom-6.0.0.tgz", + "integrity": "sha512-bnreXCgus6IIadyHNlN/oI5FfX4dWgvGhOPvpr7zzCYDGAPIfvyIoAozMBINmhmsVuqV0cncejF2y5KC7ScqOg==", + "deprecated": "This is a stub types definition. 
@testing-library/jest-dom provides its own type definitions, so you do not need this installed.", + "dev": true, + "dependencies": { + "@testing-library/jest-dom": "*" + } + }, "node_modules/@types/unist": { "version": "2.0.10", "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz", @@ -3882,6 +4550,150 @@ "vite": "^4 || ^5" } }, + "node_modules/@vitest/coverage-v8": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-1.6.0.tgz", + "integrity": "sha512-KvapcbMY/8GYIG0rlwwOKCVNRc0OL20rrhFkg/CHNzncV03TE2XWvO5w9uZYoxNiMEBacAJt3unSOiZ7svePew==", + "dev": true, + "dependencies": { + "@ampproject/remapping": "^2.2.1", + "@bcoe/v8-coverage": "^0.2.3", + "debug": "^4.3.4", + "istanbul-lib-coverage": "^3.2.2", + "istanbul-lib-report": "^3.0.1", + "istanbul-lib-source-maps": "^5.0.4", + "istanbul-reports": "^3.1.6", + "magic-string": "^0.30.5", + "magicast": "^0.3.3", + "picocolors": "^1.0.0", + "std-env": "^3.5.0", + "strip-literal": "^2.0.0", + "test-exclude": "^6.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "vitest": "1.6.0" + } + }, + "node_modules/@vitest/expect": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-1.6.0.tgz", + "integrity": "sha512-ixEvFVQjycy/oNgHjqsL6AZCDduC+tflRluaHIzKIsdbzkLn2U/iBnVeJwB6HsIjQBdfMR8Z0tRxKUsvFJEeWQ==", + "dev": true, + "dependencies": { + "@vitest/spy": "1.6.0", + "@vitest/utils": "1.6.0", + "chai": "^4.3.10" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-1.6.0.tgz", + "integrity": "sha512-P4xgwPjwesuBiHisAVz/LSSZtDjOTPYZVmNAnpHHSR6ONrf8eCJOFRvUwdHn30F5M1fxhqtl7QZQUk2dprIXAg==", + "dev": true, + "dependencies": { + "@vitest/utils": "1.6.0", + "p-limit": "^5.0.0", + "pathe": "^1.1.1" + }, + "funding": { + "url": 
"https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner/node_modules/p-limit": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-5.0.0.tgz", + "integrity": "sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^1.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@vitest/runner/node_modules/yocto-queue": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz", + "integrity": "sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==", + "dev": true, + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@vitest/snapshot": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-1.6.0.tgz", + "integrity": "sha512-+Hx43f8Chus+DCmygqqfetcAZrDJwvTj0ymqjQq4CvmpKFSTVteEOBzCusu1x2tt4OJcvBflyHUE0DZSLgEMtQ==", + "dev": true, + "dependencies": { + "magic-string": "^0.30.5", + "pathe": "^1.1.1", + "pretty-format": "^29.7.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-1.6.0.tgz", + "integrity": "sha512-leUTap6B/cqi/bQkXUu6bQV5TZPx7pmMBKBQiI0rJA8c3pB56ZsaTbREnF7CJfmvAS4V2cXIBAh/3rVwrrCYgw==", + "dev": true, + "dependencies": { + "tinyspy": "^2.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/ui": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/ui/-/ui-1.6.0.tgz", + "integrity": "sha512-k3Lyo+ONLOgylctiGovRKy7V4+dIN2yxstX3eY5cWFXH6WP+ooVX79YSyi0GagdTQzLmT43BF27T0s6dOIPBXA==", + "dev": true, + "dependencies": { + "@vitest/utils": 
"1.6.0", + "fast-glob": "^3.3.2", + "fflate": "^0.8.1", + "flatted": "^3.2.9", + "pathe": "^1.1.1", + "picocolors": "^1.0.0", + "sirv": "^2.0.4" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "vitest": "1.6.0" + } + }, + "node_modules/@vitest/utils": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-1.6.0.tgz", + "integrity": "sha512-21cPiuGMoMZwiOHa2i4LXkMkMkCGzA+MVFV70jRwHo95dL4x/ts5GZhML1QWuy7yfp3WzK3lRvZi3JnXTYqrBw==", + "dev": true, + "dependencies": { + "diff-sequences": "^29.6.3", + "estree-walker": "^3.0.3", + "loupe": "^2.3.7", + "pretty-format": "^29.7.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, "node_modules/@xobotyi/scrollbar-width": { "version": "1.9.5", "resolved": "https://registry.npmjs.org/@xobotyi/scrollbar-width/-/scrollbar-width-1.9.5.tgz", @@ -3923,6 +4735,30 @@ "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, + "node_modules/acorn-walk": { + "version": "8.3.3", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.3.tgz", + "integrity": "sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==", + "dev": true, + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/agent-base": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", + "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "dev": true, + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, "node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", @@ -3980,6 +4816,15 @@ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, + "node_modules/aria-query": { 
+ "version": "5.3.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz", + "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==", + "dev": true, + "dependencies": { + "dequal": "^2.0.3" + } + }, "node_modules/array-bounds": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/array-bounds/-/array-bounds-1.0.1.tgz", @@ -4127,6 +4972,15 @@ "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" }, + "node_modules/assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true, + "engines": { + "node": "*" + } + }, "node_modules/asynciterator.prototype": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/asynciterator.prototype/-/asynciterator.prototype-1.0.0.tgz", @@ -4187,6 +5041,27 @@ "proxy-from-env": "^1.1.0" } }, + "node_modules/babel-merge": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/babel-merge/-/babel-merge-3.0.0.tgz", + "integrity": "sha512-eBOBtHnzt9xvnjpYNI5HmaPp/b2vMveE5XggzqHnQeHJ8mFIBrBv6WZEVIj5jJ2uwTItkqKo9gWzEEcBxEq0yw==", + "deprecated": "Package no longer supported. 
Contact Support at https://www.npmjs.com/support for more info.", + "dependencies": { + "deepmerge": "^2.2.1", + "object.omit": "^3.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/babel-merge/node_modules/deepmerge": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-2.2.1.tgz", + "integrity": "sha512-R9hc1Xa/NOBi9WRVUWg19rl1UB7Tt4kuPd+thNJgFZoxXsTz7ncaPaeIm+40oSGuP33DfMb4sZt1QIGiJzC4EA==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/babel-plugin-macros": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz", @@ -4282,6 +5157,37 @@ "node": ">=8" } }, + "node_modules/browserslist": { + "version": "4.23.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.1.tgz", + "integrity": "sha512-TUfofFo/KsK/bWZ9TWQ5O26tsWW4Uhmt8IYklbnUa70udB6P2wA7w7o4PY4muaEPBQaAX+CEnmmIA41NVHtPVw==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001629", + "electron-to-chromium": "^1.4.796", + "node-releases": "^2.0.14", + "update-browserslist-db": "^1.0.16" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, "node_modules/buffer": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", @@ -4323,6 +5229,15 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/cac": { + "version": "6.7.14", + "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", + "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", + "dev": true, + "engines": { + "node": ">=8" + } + 
}, "node_modules/call-bind": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", @@ -4344,6 +5259,25 @@ "node": ">=6" } }, + "node_modules/caniuse-lite": { + "version": "1.0.30001636", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001636.tgz", + "integrity": "sha512-bMg2vmr8XBsbL6Lr0UHXy/21m84FTxDLWn2FSqMd5PrlbMxwJlQnC2YWYxVgp66PZE+BBNF2jYQUBKCo1FDeZg==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] + }, "node_modules/canvas-fit": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/canvas-fit/-/canvas-fit-1.5.0.tgz", @@ -4352,6 +5286,29 @@ "element-size": "^1.1.1" } }, + "node_modules/canvas-hypertxt": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/canvas-hypertxt/-/canvas-hypertxt-1.0.3.tgz", + "integrity": "sha512-+VsMpRr64jYgKq2IeFUNel3vCZH/IzS+iXSHxmUV3IUH5dXlC9xHz4AwtPZisDxZ5MWcuK0V+TXgPKFPiZnxzg==" + }, + "node_modules/chai": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.4.1.tgz", + "integrity": "sha512-13sOfMv2+DWduEU+/xbun3LScLoqN17nBeTLUsmDfKdoiC1fr0n9PU4guu4AhRcOVFk/sW8LyZWHuhWtQZiF+g==", + "dev": true, + "dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.3", + "deep-eql": "^4.1.3", + "get-func-name": "^2.0.2", + "loupe": "^2.3.6", + "pathval": "^1.1.1", + "type-detect": "^4.0.8" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -4394,6 +5351,18 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/check-error": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.3.tgz", + "integrity": 
"sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.2" + }, + "engines": { + "node": "*" + } + }, "node_modules/chevrotain": { "version": "6.5.0", "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-6.5.0.tgz", @@ -4565,6 +5534,12 @@ "typedarray": "^0.0.6" } }, + "node_modules/confbox": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.7.tgz", + "integrity": "sha512-uJcB/FKZtBMCJpK8MQji6bJHgu1tixKPxRLeGkNzBoOZzpnZUJm0jm2/sBDWcuBx1dYgxV4JU+g5hmNxCyAmdA==", + "dev": true + }, "node_modules/convert-source-map": { "version": "1.9.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", @@ -4744,6 +5719,24 @@ "resolved": "https://registry.npmjs.org/csscolorparser/-/csscolorparser-1.0.3.tgz", "integrity": "sha512-umPSgYwZkdFoUrH5hIq5kf0wPSXiro51nPw0j2K/c83KflkPSTBGMz6NJvMB+07VlL0y7VPo6QJcDjcgKTTm3w==" }, + "node_modules/cssstyle": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-4.0.1.tgz", + "integrity": "sha512-8ZYiJ3A/3OkDd093CBT/0UKDWry7ak4BdPTFP2+QEP7cmhouyq/Up709ASSj2cK02BbZiMgk7kYjZNS4QP5qrQ==", + "dev": true, + "dependencies": { + "rrweb-cssom": "^0.6.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/cssstyle/node_modules/rrweb-cssom": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.6.0.tgz", + "integrity": "sha512-APM0Gt1KoXBz0iIkkdB/kfvGOwC4UuJFeG/c+yV7wSc7q96cG/kJ0HiYCnzivD9SB53cLV1MlHFNfOuPaadYSw==", + "dev": true + }, "node_modules/csstype": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", @@ -5051,6 +6044,53 @@ "d3-transition": "1" } }, + "node_modules/data-urls": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-5.0.0.tgz", + "integrity": 
"sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==", + "dev": true, + "dependencies": { + "whatwg-mimetype": "^4.0.0", + "whatwg-url": "^14.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/data-urls/node_modules/tr46": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.0.0.tgz", + "integrity": "sha512-tk2G5R2KRwBd+ZN0zaEXpmzdKyOYksXwywulIX95MBODjSzMIuQnQ3m8JxgbhnL1LeVo7lqQKsYa1O3Htl7K5g==", + "dev": true, + "dependencies": { + "punycode": "^2.3.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/data-urls/node_modules/webidl-conversions": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", + "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/data-urls/node_modules/whatwg-url": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.0.0.tgz", + "integrity": "sha512-1lfMEm2IEr7RIV+f4lUNPOqfFL+pO+Xw3fJSqmjX9AbXcXcYOkCe1P6+9VBZB6n94af16NfZf+sSk0JCBZC9aw==", + "dev": true, + "dependencies": { + "tr46": "^5.0.0", + "webidl-conversions": "^7.0.0" + }, + "engines": { + "node": ">=18" + } + }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -5067,6 +6107,12 @@ } } }, + "node_modules/decimal.js": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.3.tgz", + "integrity": "sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==", + "dev": true + }, "node_modules/decompress-response": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", @@ -5082,6 +6128,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + 
"node_modules/deep-eql": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.4.tgz", + "integrity": "sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg==", + "dev": true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/deep-extend": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", @@ -5150,6 +6208,15 @@ "node": ">=0.4.0" } }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/detect-kerning": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/detect-kerning/-/detect-kerning-2.1.2.tgz", @@ -5164,6 +6231,15 @@ "node": ">=8" } }, + "node_modules/diff-sequences": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", + "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, "node_modules/dir-glob": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", @@ -5188,6 +6264,13 @@ "node": ">=6.0.0" } }, + "node_modules/dom-accessibility-api": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz", + "integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==", + "dev": true, + "peer": true + }, "node_modules/dom-helpers": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", @@ -5282,6 +6365,11 @@ "resolved": 
"https://registry.npmjs.org/earcut/-/earcut-2.2.4.tgz", "integrity": "sha512-/pjZsA1b4RPHbeWZQn66SWS8nZZWLQQ23oE3Eam7aroEFGEvwKAsJfZ9ytiEMycfzXWpca4FA9QIOehf7PocBQ==" }, + "node_modules/electron-to-chromium": { + "version": "1.4.805", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.805.tgz", + "integrity": "sha512-8W4UJwX/w9T0QSzINJckTKG6CYpAUTqsaWcWIsdud3I1FYJcMgW9QqT1/4CBff/pP/TihWh13OmiyY8neto6vw==" + }, "node_modules/element-size": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/element-size/-/element-size-1.1.1.tgz", @@ -5303,6 +6391,18 @@ "once": "^1.4.0" } }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, "node_modules/error-ex": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", @@ -5516,6 +6616,14 @@ "@esbuild/win32-x64": "0.19.12" } }, + "node_modules/escalade": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", + "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", + "engines": { + "node": ">=6" + } + }, "node_modules/escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", @@ -5853,6 +6961,15 @@ "node": ">=4.0" } }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "dependencies": { + "@types/estree": "^1.0.0" + } + }, 
"node_modules/esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", @@ -5869,6 +6986,41 @@ "node": ">=0.8.x" } }, + "node_modules/execa": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", + "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^8.0.1", + "human-signals": "^5.0.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": ">=16.17" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/execa/node_modules/get-stream": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", + "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", + "dev": true, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/expand-template": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", @@ -6077,6 +7229,12 @@ "resolved": "https://registry.npmjs.org/fbjs-css-vars/-/fbjs-css-vars-1.0.2.tgz", "integrity": "sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ==" }, + "node_modules/fflate": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz", + "integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==", + "dev": true + }, "node_modules/file-entry-cache": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", @@ -6120,7 +7278,6 @@ "version": "5.0.0", 
"resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "dev": true, "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" @@ -6364,6 +7521,14 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/geojson-vt": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/geojson-vt/-/geojson-vt-3.2.1.tgz", @@ -6374,6 +7539,15 @@ "resolved": "https://registry.npmjs.org/get-canvas-context/-/get-canvas-context-1.0.2.tgz", "integrity": "sha512-LnpfLf/TNzr9zVOGiIY6aKCz8EKuXmlYNV7CM2pUjBa/B+c2I15tS7KLySep75+FuerJdmArvJLcsAXWEy2H0A==" }, + "node_modules/get-func-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz", + "integrity": "sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==", + "dev": true, + "engines": { + "node": "*" + } + }, "node_modules/get-intrinsic": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", @@ -6943,6 +8117,29 @@ "resolved": "https://registry.npmjs.org/hsluv/-/hsluv-0.0.3.tgz", "integrity": "sha512-08iL2VyCRbkQKBySkSh6m8zMUa3sADAxGVWs3Z1aPcUkTJeK0ETG4Fc27tEmQBGUAXZjIsXOZqBvacuVNSC/fQ==" }, + "node_modules/html-element-attributes": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/html-element-attributes/-/html-element-attributes-1.3.1.tgz", + "integrity": "sha512-UrRKgp5sQmRnDy4TEwAUsu14XBUlzKB8U3hjIYDjcZ3Hbp86Jtftzxfgrv6E/ii/h78tsaZwAnAE8HwnHr0dPA==" + }, + "node_modules/html-encoding-sniffer": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz", + "integrity": "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==", + "dev": true, + "dependencies": { + "whatwg-encoding": "^3.1.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true + }, "node_modules/html-parse-stringify": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/html-parse-stringify/-/html-parse-stringify-3.0.1.tgz", @@ -6951,6 +8148,41 @@ "void-elements": "3.1.0" } }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "dev": true, + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", + "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", + "dev": true, + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/human-signals": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", + "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", + "dev": true, + "engines": { + "node": ">=16.17.0" + } + }, "node_modules/husky": { "version": "8.0.3", "resolved": "https://registry.npmjs.org/husky/-/husky-8.0.3.tgz", @@ 
-7108,6 +8340,15 @@ "node": ">=0.8.19" } }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", @@ -7324,6 +8565,28 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-extendable/node_modules/is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "dependencies": { + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -7490,6 +8753,12 @@ "node": ">=0.10.0" } }, + "node_modules/is-potential-custom-element-name": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", + "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", + "dev": true + }, "node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -7527,6 +8796,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + 
"node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-string": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", @@ -7599,44 +8880,102 @@ "dependencies": { "call-bind": "^1.0.2" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakset": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz", + "integrity": "sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "node_modules/isobject": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" } }, - "node_modules/is-weakset": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz", - "integrity": "sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==", + "node_modules/istanbul-lib-source-maps": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.4.tgz", + "integrity": "sha512-wHOoEsNJTVltaJp8eVkm8w+GVkVNHT2YDYo53YdzQEL2gWm1hBX5cGFR9hQJtuGLebidVX7et3+dmDZrmclduw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" + "@jridgewell/trace-mapping": "^0.3.23", + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "engines": { + "node": ">=10" } }, - "node_modules/is-wsl": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", - "integrity": 
"sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "node_modules/istanbul-reports": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", + "dev": true, "dependencies": { - "is-docker": "^2.0.0" + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" }, "engines": { "node": ">=8" } }, - "node_modules/isarray": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", - "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==" - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" - }, "node_modules/iterator.prototype": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.2.tgz", @@ -7701,6 +9040,91 @@ "node": ">=12.0.0" } }, + "node_modules/jsdom": { + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-24.1.0.tgz", + "integrity": "sha512-6gpM7pRXCwIOKxX47cgOyvyQDN/Eh0f1MeKySBV2xGdKtqJBLj8P25eY3EVCWo2mglDDzozR2r2MW4T+JiNUZA==", + "dev": true, + "dependencies": { + "cssstyle": "^4.0.1", + "data-urls": "^5.0.0", + "decimal.js": "^10.4.3", + "form-data": "^4.0.0", + "html-encoding-sniffer": "^4.0.0", + "http-proxy-agent": "^7.0.2", + "https-proxy-agent": "^7.0.4", + "is-potential-custom-element-name": "^1.0.1", + "nwsapi": "^2.2.10", + "parse5": "^7.1.2", + "rrweb-cssom": "^0.7.0", + "saxes": "^6.0.0", + "symbol-tree": "^3.2.4", + "tough-cookie": "^4.1.4", + "w3c-xmlserializer": "^5.0.0", + "webidl-conversions": "^7.0.0", + "whatwg-encoding": "^3.1.1", + "whatwg-mimetype": "^4.0.0", + "whatwg-url": 
"^14.0.0", + "ws": "^8.17.0", + "xml-name-validator": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "canvas": "^2.11.2" + }, + "peerDependenciesMeta": { + "canvas": { + "optional": true + } + } + }, + "node_modules/jsdom/node_modules/tr46": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.0.0.tgz", + "integrity": "sha512-tk2G5R2KRwBd+ZN0zaEXpmzdKyOYksXwywulIX95MBODjSzMIuQnQ3m8JxgbhnL1LeVo7lqQKsYa1O3Htl7K5g==", + "dev": true, + "dependencies": { + "punycode": "^2.3.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/jsdom/node_modules/webidl-conversions": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", + "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/jsdom/node_modules/whatwg-url": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.0.0.tgz", + "integrity": "sha512-1lfMEm2IEr7RIV+f4lUNPOqfFL+pO+Xw3fJSqmjX9AbXcXcYOkCe1P6+9VBZB6n94af16NfZf+sSk0JCBZC9aw==", + "dev": true, + "dependencies": { + "tr46": "^5.0.0", + "webidl-conversions": "^7.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/json-buffer": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", @@ -7745,6 +9169,17 @@ "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", "dev": true }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": 
"https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/jsoneditor": { "version": "9.10.4", "resolved": "https://registry.npmjs.org/jsoneditor/-/jsoneditor-9.10.4.tgz", @@ -7848,11 +9283,26 @@ "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" }, + "node_modules/local-pkg": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.5.0.tgz", + "integrity": "sha512-ok6z3qlYyCDS4ZEU27HaU6x/xZa9Whf8jD4ptH5UZTQYZVYeb9bnZ3ojVhiJNLiXK1Hfc0GNbLXcmZ5plLDDBg==", + "dev": true, + "dependencies": { + "mlly": "^1.4.2", + "pkg-types": "^1.0.3" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, "node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "dev": true, "dependencies": { "p-locate": "^5.0.0" }, @@ -7904,6 +9354,15 @@ "loose-envify": "cli.js" } }, + "node_modules/loupe": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.7.tgz", + "integrity": "sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.1" + } + }, "node_modules/lowlight": { "version": "1.20.0", "resolved": "https://registry.npmjs.org/lowlight/-/lowlight-1.20.0.tgz", @@ -7928,6 +9387,51 @@ "node": ">=10" } }, + "node_modules/lz-string": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", + "integrity": 
"sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", + "dev": true, + "peer": true, + "bin": { + "lz-string": "bin/bin.js" + } + }, + "node_modules/magic-string": { + "version": "0.30.10", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.10.tgz", + "integrity": "sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ==", + "dev": true, + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.15" + } + }, + "node_modules/magicast": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.3.4.tgz", + "integrity": "sha512-TyDF/Pn36bBji9rWKHlZe+PZb6Mx5V8IHCSxk7X4aljM4e/vyDvZZYwHewdVaqiA0nb3ghfHU/6AUpDxWoER2Q==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.24.4", + "@babel/types": "^7.24.0", + "source-map-js": "^1.2.0" + } + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/map-limit": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/map-limit/-/map-limit-0.0.1.tgz", @@ -7977,6 +9481,18 @@ "node": ">=6.4.0" } }, + "node_modules/marked": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-4.3.0.tgz", + "integrity": "sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A==", + "peer": true, + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 12" + } + }, "node_modules/material-colors": { "version": "1.2.6", "resolved": "https://registry.npmjs.org/material-colors/-/material-colors-1.2.6.tgz", @@ -8027,6 +9543,12 @@ "resolved": 
"https://registry.npmjs.org/memoize-one/-/memoize-one-5.2.1.tgz", "integrity": "sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q==" }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, "node_modules/merge2": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", @@ -8067,6 +9589,18 @@ "node": ">= 0.6" } }, + "node_modules/mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/mimic-response": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", @@ -8079,6 +9613,15 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, "node_modules/minim": { "version": "0.23.8", "resolved": "https://registry.npmjs.org/minim/-/minim-0.23.8.tgz", @@ -8115,6 +9658,18 @@ "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", "optional": true }, + "node_modules/mlly": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.7.1.tgz", + "integrity": "sha512-rrVRZRELyQzrIUAVMHxP97kv+G786pHmOKzuFII8zDYahFBS7qnHh2AlYSl1GAHhaMPCz6/oHjVMcfFYgFYHgA==", + "dev": true, 
+ "dependencies": { + "acorn": "^8.11.3", + "pathe": "^1.1.2", + "pkg-types": "^1.1.1", + "ufo": "^1.5.3" + } + }, "node_modules/mobius1-selectr": { "version": "2.4.13", "resolved": "https://registry.npmjs.org/mobius1-selectr/-/mobius1-selectr-2.4.13.tgz", @@ -8156,6 +9711,15 @@ "to-px": "^1.0.1" } }, + "node_modules/mrmime": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.0.tgz", + "integrity": "sha512-eu38+hdgojoyq63s+yTpN4XMBdt5l8HhMhc4VKLO9KM5caLIBvUm4thi7fFaxyTmCKeNnXZ5pAlBwCUnhA09uw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, "node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -8339,6 +9903,11 @@ "url": "https://opencollective.com/node-fetch" } }, + "node_modules/node-releases": { + "version": "2.0.14", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", + "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==" + }, "node_modules/normalize-svg-path": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/normalize-svg-path/-/normalize-svg-path-0.1.0.tgz", @@ -8373,6 +9942,33 @@ "node": ">=6" } }, + "node_modules/npm-run-path": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "dev": true, + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + 
"funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/number-is-integer": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/number-is-integer/-/number-is-integer-1.0.1.tgz", @@ -8395,6 +9991,12 @@ "node": "*" } }, + "node_modules/nwsapi": { + "version": "2.2.10", + "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.10.tgz", + "integrity": "sha512-QK0sRs7MKv0tKe1+5uZIQk/C8XGza4DAnztJG8iD+TpJIORARrCxczA738awHrZoHeTjSSoHqao2teO0dC/gFQ==", + "dev": true + }, "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", @@ -8481,6 +10083,17 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/object.omit": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object.omit/-/object.omit-3.0.0.tgz", + "integrity": "sha512-EO+BCv6LJfu+gBIF3ggLicFebFLN5zqzz/WWJlMFfkMyGth+oBkhxzDl0wx2W4GkLzuQs/FsSkXZb2IMWQqmBQ==", + "dependencies": { + "is-extendable": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/object.values": { "version": "1.1.7", "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.7.tgz", @@ -8506,6 +10119,21 @@ "wrappy": "1" } }, + "node_modules/onetime": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "dev": true, + "dependencies": { + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/open": { "version": "7.4.2", "resolved": "https://registry.npmjs.org/open/-/open-7.4.2.tgz", @@ -8550,7 +10178,6 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "dev": 
true, "dependencies": { "yocto-queue": "^0.1.0" }, @@ -8565,7 +10192,6 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "dev": true, "dependencies": { "p-limit": "^3.0.2" }, @@ -8644,6 +10270,18 @@ "resolved": "https://registry.npmjs.org/parse-unit/-/parse-unit-1.0.1.tgz", "integrity": "sha512-hrqldJHokR3Qj88EIlV/kAyAi/G5R2+R56TBANxNMy0uPlYcttx0jnMW6Yx5KsKPSbC3KddM/7qQm3+0wEXKxg==" }, + "node_modules/parse5": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", + "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", + "dev": true, + "dependencies": { + "entities": "^4.4.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, "node_modules/patch-package": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/patch-package/-/patch-package-8.0.0.tgz", @@ -8704,7 +10342,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "dev": true, "engines": { "node": ">=8" } @@ -8738,6 +10375,21 @@ "node": ">=8" } }, + "node_modules/pathe": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz", + "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==", + "dev": true + }, + "node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true, + "engines": { + "node": "*" + } + }, "node_modules/pbf": { "version": "3.2.1", "resolved": 
"https://registry.npmjs.org/pbf/-/pbf-3.2.1.tgz", @@ -8761,10 +10413,9 @@ "integrity": "sha512-ESj2+eBxhGrcA1azgHs7lARG5+5iLakc/6nlfbpjcLl00HuuUOIuORhYXN4D1HfvMSKuVtFQjAlnwi1JHEeDIw==" }, "node_modules/picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", - "dev": true + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", + "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==" }, "node_modules/picomatch": { "version": "2.3.1", @@ -8787,6 +10438,17 @@ "resolved": "https://registry.npmjs.org/pikaday/-/pikaday-1.8.2.tgz", "integrity": "sha512-TNtsE+34BIax3WtkB/qqu5uepV1McKYEgvL3kWzU7aqPCpMEN6rBF3AOwu4WCwAealWlBGobXny/9kJb49C1ew==" }, + "node_modules/pkg-types": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.1.1.tgz", + "integrity": "sha512-ko14TjmDuQJ14zsotODv7dBlwxKhUKQEhuhmbqo1uCi9BB0Z2alo/wAXg6q1dTR5TyuqYyWhjtfe/Tsh+X28jQ==", + "dev": true, + "dependencies": { + "confbox": "^0.1.7", + "mlly": "^1.7.0", + "pathe": "^1.1.2" + } + }, "node_modules/plotly.js": { "version": "2.26.1", "resolved": "https://registry.npmjs.org/plotly.js/-/plotly.js-2.26.1.tgz", @@ -8959,6 +10621,32 @@ "node": ">=6.0.0" } }, + "node_modules/pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, "node_modules/prismjs": { "version": "1.29.0", "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.29.0.tgz", @@ -9035,6 +10723,12 @@ "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" }, + "node_modules/psl": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz", + "integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==", + "dev": true + }, "node_modules/pump": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", @@ -9333,6 +11027,18 @@ "react": ">= 16.8 || 18.0.0" } }, + "node_modules/react-easy-swipe": { + "version": "0.0.21", + "resolved": "https://registry.npmjs.org/react-easy-swipe/-/react-easy-swipe-0.0.21.tgz", + "integrity": "sha512-OeR2jAxdoqUMHIn/nS9fgreI5hSpgGoL5ezdal4+oO7YSSgJR8ga+PkYGJrSrJ9MKlPcQjMQXnketrD7WNmNsg==", + "peer": true, + "dependencies": { + "prop-types": "^15.5.8" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/react-hook-form": { "version": "7.47.0", "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.47.0.tgz", @@ -9348,6 +11054,14 @@ "react": "^16.8.0 || ^17 || ^18" } }, + "node_modules/react-html-attributes": { + "version": "1.4.6", + "resolved": "https://registry.npmjs.org/react-html-attributes/-/react-html-attributes-1.4.6.tgz", + "integrity": "sha512-uS3MmThNKFH2EZUQQw4k5pIcU7XIr208UE5dktrj/GOH1CMagqxDl4DCLpt3o2l9x+IB5nVYBeN3Cr4IutBXAg==", + "dependencies": { + "html-element-attributes": "^1.0.0" + } + }, 
"node_modules/react-i18next": { "version": "13.2.2", "resolved": "https://registry.npmjs.org/react-i18next/-/react-i18next-13.2.2.tgz", @@ -9423,6 +11137,18 @@ "resolved": "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz", "integrity": "sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==" }, + "node_modules/react-number-format": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/react-number-format/-/react-number-format-5.4.0.tgz", + "integrity": "sha512-NWdICrqLhI7rAS8yUeLVd6Wr4cN7UjJ9IBTS0f/a9i7UB4x4Ti70kGnksBtZ7o4Z7YRbvCMMR/jQmkoOBa/4fg==", + "dependencies": { + "prop-types": "^15.7.2" + }, + "peerDependencies": { + "react": "^0.14 || ^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^0.14 || ^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0" + } + }, "node_modules/react-plotly.js": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/react-plotly.js/-/react-plotly.js-2.6.0.tgz", @@ -9473,6 +11199,17 @@ } } }, + "node_modules/react-responsive-carousel": { + "version": "3.2.23", + "resolved": "https://registry.npmjs.org/react-responsive-carousel/-/react-responsive-carousel-3.2.23.tgz", + "integrity": "sha512-pqJLsBaKHWJhw/ItODgbVoziR2z4lpcJg+YwmRlSk4rKH32VE633mAtZZ9kDXjy4wFO+pgUZmDKPsPe1fPmHCg==", + "peer": true, + "dependencies": { + "classnames": "^2.2.5", + "prop-types": "^15.5.8", + "react-easy-swipe": "^0.0.21" + } + }, "node_modules/react-router": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.3.0.tgz", @@ -9656,6 +11393,19 @@ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, + "node_modules/redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": 
"sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, + "dependencies": { + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/redux": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/redux/-/redux-4.2.1.tgz", @@ -9971,6 +11721,12 @@ "fsevents": "~2.3.2" } }, + "node_modules/rrweb-cssom": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.7.1.tgz", + "integrity": "sha512-TrEMa7JGdVm0UThDJSx7ddw5nVm3UJS9o9CCIZ72B1vSyEZoziDqBYP3XIoi/12lKrJR8rE3jeFHMok2F/Mnsg==", + "dev": true + }, "node_modules/rtl-css-js": { "version": "1.16.1", "resolved": "https://registry.npmjs.org/rtl-css-js/-/rtl-css-js-1.16.1.tgz", @@ -10071,6 +11827,18 @@ "resolved": "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz", "integrity": "sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==" }, + "node_modules/saxes": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz", + "integrity": "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==", + "dev": true, + "dependencies": { + "xmlchars": "^2.2.0" + }, + "engines": { + "node": ">=v12.22.7" + } + }, "node_modules/scheduler": { "version": "0.23.0", "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz", @@ -10218,6 +11986,24 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": 
"sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/signum": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/signum/-/signum-1.0.0.tgz", @@ -10268,6 +12054,20 @@ "simple-concat": "^1.0.0" } }, + "node_modules/sirv": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/sirv/-/sirv-2.0.4.tgz", + "integrity": "sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==", + "dev": true, + "dependencies": { + "@polka/url": "^1.0.0-next.24", + "mrmime": "^2.0.0", + "totalist": "^3.0.0" + }, + "engines": { + "node": ">= 10" + } + }, "node_modules/slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", @@ -10286,9 +12086,9 @@ } }, "node_modules/source-map-js": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", - "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", + "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", "dev": true, "engines": { "node": ">=0.10.0" @@ -10351,6 +12151,12 @@ "node": "*" } }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true + }, "node_modules/stackframe": { "version": "1.3.4", "resolved": "https://registry.npmjs.org/stackframe/-/stackframe-1.3.4.tgz", @@ -10396,6 +12202,12 @@ "escodegen": "^2.1.0" } }, + "node_modules/std-env": { + "version": "3.7.0", + "resolved": 
"https://registry.npmjs.org/std-env/-/std-env-3.7.0.tgz", + "integrity": "sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg==", + "dev": true + }, "node_modules/stream-parser": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/stream-parser/-/stream-parser-0.3.1.tgz", @@ -10520,6 +12332,30 @@ "node": ">=8" } }, + "node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/strip-json-comments": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", @@ -10532,6 +12368,24 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/strip-literal": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-2.1.0.tgz", + "integrity": "sha512-Op+UycaUt/8FbN/Z2TWPBLge3jWrP3xj10f3fnYxf052bKuS3EKs1ZQcVGjnEMdsNVAM+plXRdmjrZ/KgG3Skw==", + "dev": true, + "dependencies": { + "js-tokens": "^9.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/strip-literal/node_modules/js-tokens": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.0.tgz", + "integrity": 
"sha512-WriZw1luRMlmV3LGJaR6QOJjWwgLUTf89OwT2lUOyjX2dJGBwgmIkbcz+7WFZjrZM635JOIR517++e/67CP9dQ==", + "dev": true + }, "node_modules/strongly-connected-components": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/strongly-connected-components/-/strongly-connected-components-1.0.1.tgz", @@ -10702,6 +12556,12 @@ "immutable": "^3.8.1 || ^4.0.0-rc.1" } }, + "node_modules/symbol-tree": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", + "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", + "dev": true + }, "node_modules/synckit": { "version": "0.8.8", "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.8.8.tgz", @@ -10771,6 +12631,20 @@ "node": ">= 6" } }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -10805,16 +12679,40 @@ "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.1.tgz", "integrity": "sha512-AD5ih2NlSssTCwsMznbvwMZpJ1cbhkGd2uueNxzv2jDlEeZdU04JQfRnggJQ8DrcVBGjAsCKwFBbDlVNtEMlzw==" }, + "node_modules/tinybench": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.8.0.tgz", + "integrity": "sha512-1/eK7zUnIklz4JUUlL+658n58XO2hHLQfSk1Zf2LKieUjxidN16eKFEoDEfjHc3ohofSSqK3X5yO6VGb6iW8Lw==", + "dev": true + }, "node_modules/tinycolor2": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/tinycolor2/-/tinycolor2-1.6.0.tgz", "integrity": 
"sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw==" }, + "node_modules/tinypool": { + "version": "0.8.4", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-0.8.4.tgz", + "integrity": "sha512-i11VH5gS6IFeLY3gMBQ00/MmLncVP7JLXOw1vlgkytLmJK7QnEr7NXf0LBdxfmNPAeyetukOk0bOYrJrFGjYJQ==", + "dev": true, + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/tinyqueue": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/tinyqueue/-/tinyqueue-2.0.3.tgz", "integrity": "sha512-ppJZNDuKGgxzkHihX8v9v9G5f+18gzaTfrukGrq6ueg0lmH4nqVnA2IPG0AEH3jKEk2GRJCUhDoqpoiw3PHLBA==" }, + "node_modules/tinyspy": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-2.2.1.tgz", + "integrity": "sha512-KYad6Vy5VDWV4GH3fjpseMQ/XU2BhIYP7Vzd0LG44qRWm/Yt2WCOTicFdvmgo6gWaqooMQCawTtILVQJupKu7A==", + "dev": true, + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/tmp": { "version": "0.0.33", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", @@ -10876,6 +12774,39 @@ "topoquantize": "bin/topoquantize" } }, + "node_modules/totalist": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", + "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/tough-cookie": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.4.tgz", + "integrity": "sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==", + "dev": true, + "dependencies": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.2.0", + "url-parse": "^1.5.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/tough-cookie/node_modules/universalify": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + 
"integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", + "dev": true, + "engines": { + "node": ">= 4.0.0" + } + }, "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", @@ -10940,6 +12871,17 @@ "resolved": "https://registry.npmjs.org/ts-easing/-/ts-easing-0.2.0.tgz", "integrity": "sha512-Z86EW+fFFh/IFB1fqQ3/+7Zpf9t2ebOAxNI/V6Wo7r5gqiqtxmgTlQ1qbqQcjLKYeSHPTsEmvlJUDg/EuL0uHQ==" }, + "node_modules/ts-invariant": { + "version": "0.10.3", + "resolved": "https://registry.npmjs.org/ts-invariant/-/ts-invariant-0.10.3.tgz", + "integrity": "sha512-uivwYcQaxAucv1CzRp2n/QdYPo4ILf9VXgH19zEIjFx2EJufV16P0JtJVpYHy89DItG6Kwj2oIUjrcK5au+4tQ==", + "dependencies": { + "tslib": "^2.1.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/ts-mixer": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/ts-mixer/-/ts-mixer-6.0.3.tgz", @@ -10985,6 +12927,15 @@ "node": ">= 0.8.0" } }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, "node_modules/type-fest": { "version": "0.20.2", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", @@ -11118,6 +13069,12 @@ "node": "*" } }, + "node_modules/ufo": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.5.3.tgz", + "integrity": "sha512-Y7HYmWaFwPUmkoQCUIAYpKqkOf+SbVj/2fJJZ4RJMCfZp0rTGwRbzQD+HghfnhKOjL9E01okqz+ncJskGYfBNw==", + "dev": true + }, "node_modules/unbox-primitive": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", @@ -11171,6 +13128,35 @@ "resolved": "https://registry.npmjs.org/unraw/-/unraw-3.0.0.tgz", "integrity": 
"sha512-08/DA66UF65OlpUDIQtbJyrqTR0jTAlJ+jsnkQ4jxR7+K5g5YG1APZKQSMCE1vqqmD+2pv6+IdEjmopFatacvg==" }, + "node_modules/update-browserslist-db": { + "version": "1.0.16", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.16.tgz", + "integrity": "sha512-KVbTxlBYlckhF5wgfyZXTWnMn7MMZjMu9XG8bPlliUOP9ThaF4QnhP8qrjrH7DRzHfSk0oQv1wToW+iA5GajEQ==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "escalade": "^3.1.2", + "picocolors": "^1.0.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, "node_modules/update-diff": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/update-diff/-/update-diff-1.1.0.tgz", @@ -11335,6 +13321,93 @@ } } }, + "node_modules/vite-node": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-1.6.0.tgz", + "integrity": "sha512-de6HJgzC+TFzOu0NTC4RAIsyf/DY/ibWDYQUcuEA84EMHhcefTUGkjFHKKEJhQN4A+6I0u++kr3l36ZF2d7XRw==", + "dev": true, + "dependencies": { + "cac": "^6.7.14", + "debug": "^4.3.4", + "pathe": "^1.1.1", + "picocolors": "^1.0.0", + "vite": "^5.0.0" + }, + "bin": { + "vite-node": "vite-node.mjs" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/vitest": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-1.6.0.tgz", + "integrity": "sha512-H5r/dN06swuFnzNFhq/dnz37bPXnq8xB2xB5JOVk8K09rUtoeNN+LHWkoQ0A/i3hvbUKKcCei9KpbxqHMLhLLA==", + "dev": true, + "dependencies": { + "@vitest/expect": "1.6.0", + "@vitest/runner": "1.6.0", + "@vitest/snapshot": "1.6.0", + "@vitest/spy": "1.6.0", + "@vitest/utils": "1.6.0", + "acorn-walk": 
"^8.3.2", + "chai": "^4.3.10", + "debug": "^4.3.4", + "execa": "^8.0.1", + "local-pkg": "^0.5.0", + "magic-string": "^0.30.5", + "pathe": "^1.1.1", + "picocolors": "^1.0.0", + "std-env": "^3.5.0", + "strip-literal": "^2.0.0", + "tinybench": "^2.5.1", + "tinypool": "^0.8.3", + "vite": "^5.0.0", + "vite-node": "1.6.0", + "why-is-node-running": "^2.2.2" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@types/node": "^18.0.0 || >=20.0.0", + "@vitest/browser": "1.6.0", + "@vitest/ui": "1.6.0", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, "node_modules/void-elements": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/void-elements/-/void-elements-3.1.0.tgz", @@ -11353,6 +13426,18 @@ "pbf": "^3.2.1" } }, + "node_modules/w3c-xmlserializer": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz", + "integrity": "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==", + "dev": true, + "dependencies": { + "xml-name-validator": "^5.0.0" + }, + "engines": { + "node": ">=18" + } + }, "node_modules/weak-map": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/weak-map/-/weak-map-1.0.8.tgz", @@ -11385,6 +13470,39 @@ "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" }, + "node_modules/whatwg-encoding": { + "version": "3.1.1", + "resolved": 
"https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz", + "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==", + "dev": true, + "dependencies": { + "iconv-lite": "0.6.3" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/whatwg-encoding/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/whatwg-mimetype": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz", + "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==", + "dev": true, + "engines": { + "node": ">=18" + } + }, "node_modules/whatwg-url": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", @@ -11484,6 +13602,22 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/why-is-node-running": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.2.2.tgz", + "integrity": "sha512-6tSwToZxTOcotxHeA+qGCq1mVzKR3CwcJGmVcY+QE8SHy6TnpFnh8PAvPNHYr7EcuVeG0QSMxtYCuO1ta/G/oA==", + "dev": true, + "dependencies": { + "siginfo": "^2.0.0", + "stackback": "0.0.2" + }, + "bin": { + "why-is-node-running": "cli.js" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/world-calendars": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/world-calendars/-/world-calendars-1.0.3.tgz", @@ -11497,6 +13631,27 @@ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, 
+ "node_modules/ws": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", + "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", + "dev": true, + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/xml": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/xml/-/xml-1.0.1.tgz", @@ -11521,6 +13676,21 @@ "xml-js": "bin/cli.js" } }, + "node_modules/xml-name-validator": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz", + "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==", + "dev": true, + "engines": { + "node": ">=18" + } + }, + "node_modules/xmlchars": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", + "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", + "dev": true + }, "node_modules/xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", @@ -11546,7 +13716,6 @@ "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "dev": true, "engines": { "node": ">=10" }, diff --git a/webapp/package.json b/webapp/package.json index a53b548a9f..98bfa9a549 100644 --- a/webapp/package.json +++ b/webapp/package.json @@ -1,6 +1,6 @@ { "name": "antares-web", - "version": "2.17.2", + "version": "2.17.3", "private": true, "type": "module", "scripts": { @@ -8,11 +8,15 @@ "clean": "rm -rf dist node_modules/.vite", "dev": "vite", "lint": "tsc --noEmit && 
eslint . --ext ts,tsx --report-unused-disable-directives", - "preview": "vite preview" + "preview": "vite preview", + "test": "vitest", + "test:ui": "vitest --ui", + "coverage": "vitest run --coverage" }, "dependencies": { "@emotion/react": "11.11.1", "@emotion/styled": "11.11.0", + "@glideapps/glide-data-grid": "6.0.3", "@handsontable/react": "14.1.0", "@mui/icons-material": "5.14.11", "@mui/lab": "5.0.0-alpha.146", @@ -68,6 +72,9 @@ "xml-js": "1.6.11" }, "devDependencies": { + "@testing-library/jest-dom": "6.4.6", + "@testing-library/react": "16.0.0", + "@testing-library/user-event": "14.5.2", "@total-typescript/ts-reset": "0.5.1", "@types/d3": "5.16.0", "@types/debug": "4.1.9", @@ -89,10 +96,13 @@ "@types/react-window": "1.8.6", "@types/redux-logger": "3.0.10", "@types/swagger-ui-react": "4.18.1", + "@types/testing-library__jest-dom": "6.0.0", "@types/uuid": "9.0.4", "@typescript-eslint/eslint-plugin": "6.14.0", "@typescript-eslint/parser": "6.14.0", "@vitejs/plugin-react-swc": "3.5.0", + "@vitest/coverage-v8": "1.6.0", + "@vitest/ui": "1.6.0", "eslint": "8.55.0", "eslint-config-prettier": "9.0.0", "eslint-plugin-jsdoc": "48.2.0", @@ -101,9 +111,11 @@ "eslint-plugin-react-hooks": "4.6.0", "eslint-plugin-react-refresh": "0.4.5", "husky": "8.0.3", + "jsdom": "24.1.0", "prettier": "3.0.3", "typescript": "5.2.2", - "vite": "5.0.8" + "vite": "5.0.8", + "vitest": "1.6.0" }, "engines": { "node": "18.16.1" diff --git a/webapp/src/components/App/Singlestudy/HomeView/InformationView/CreateVariantDialog.tsx b/webapp/src/components/App/Singlestudy/HomeView/InformationView/CreateVariantDialog.tsx index 9d46ac4550..7c23b14a4f 100644 --- a/webapp/src/components/App/Singlestudy/HomeView/InformationView/CreateVariantDialog.tsx +++ b/webapp/src/components/App/Singlestudy/HomeView/InformationView/CreateVariantDialog.tsx @@ -58,7 +58,7 @@ function CreateVariantDialog(props: Props) { return ( {({ control }) => ( -
+
({ label: ver.name, value: ver.id, }))} - name="sourceId" control={control} - required + rules={{ required: true }} />
)} diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx index d6f987753a..97637987d3 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Renewables/index.tsx @@ -20,6 +20,7 @@ import { addClusterCapacity, capacityAggregationFn, getClustersWithCapacityTotals, + toCapacityString, } from "../common/clustersUtils"; import { TRow } from "../../../../../../common/GroupedDataTable/types"; import BooleanCell from "../../../../../../common/GroupedDataTable/cellRenderers/BooleanCell"; @@ -80,29 +81,26 @@ function Renewables() { columnHelper.accessor("nominalCapacity", { header: "Nominal Capacity (MW)", size: 220, - Cell: ({ cell }) => Math.floor(cell.getValue()), - }), - columnHelper.accessor("installedCapacity", { - header: "Enabled / Installed (MW)", - size: 220, - aggregationFn: capacityAggregationFn(), - AggregatedCell: ({ cell }) => ( - - {cell.getValue() ?? 
""} - - ), - Cell: ({ row }) => ( - <> - {Math.floor(row.original.enabledCapacity)} /{" "} - {Math.floor(row.original.installedCapacity)} - - ), - Footer: () => ( - - {totalEnabledCapacity} / {totalInstalledCapacity} - - ), + Cell: ({ cell }) => cell.getValue().toFixed(1), }), + columnHelper.accessor( + (row) => toCapacityString(row.enabledCapacity, row.installedCapacity), + { + header: "Enabled / Installed (MW)", + size: 220, + aggregationFn: capacityAggregationFn(), + AggregatedCell: ({ cell }) => ( + + {cell.getValue()} + + ), + Footer: () => ( + + {toCapacityString(totalEnabledCapacity, totalInstalledCapacity)} + + ), + }, + ), ]; }, [totals]); diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx index 69193af96d..6c19931d74 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Storages/index.tsx @@ -69,13 +69,13 @@ function Storages() { aggregationFn: "sum", AggregatedCell: ({ cell }) => ( - {Math.floor(cell.getValue())} + {Math.round(cell.getValue())} ), - Cell: ({ cell }) => Math.floor(cell.getValue()), + Cell: ({ cell }) => Math.round(cell.getValue()), Footer: () => ( - {Math.floor(totalInjectionNominalCapacity)} + {Math.round(totalInjectionNominalCapacity)} ), }), @@ -96,13 +96,13 @@ function Storages() { aggregationFn: "sum", AggregatedCell: ({ cell }) => ( - {Math.floor(cell.getValue())} + {Math.round(cell.getValue())} ), - Cell: ({ cell }) => Math.floor(cell.getValue()), + Cell: ({ cell }) => Math.round(cell.getValue()), Footer: () => ( - {Math.floor(totalWithdrawalNominalCapacity)} + {Math.round(totalWithdrawalNominalCapacity)} ), }), @@ -123,12 +123,12 @@ function Storages() { columnHelper.accessor("efficiency", { header: t("study.modelization.storages.efficiency"), size: 50, - Cell: ({ cell }) => 
`${Math.floor(cell.getValue() * 100)}`, + Cell: ({ cell }) => `${Math.round(cell.getValue() * 100)}`, }), columnHelper.accessor("initialLevel", { header: t("study.modelization.storages.initialLevel"), size: 50, - Cell: ({ cell }) => `${Math.floor(cell.getValue() * 100)}`, + Cell: ({ cell }) => `${Math.round(cell.getValue() * 100)}`, }), columnHelper.accessor("initialLevelOptim", { header: t("study.modelization.storages.initialLevelOptim"), diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx index 0b36a6903a..3a7cfd6801 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/Thermal/index.tsx @@ -20,6 +20,7 @@ import { addClusterCapacity, capacityAggregationFn, getClustersWithCapacityTotals, + toCapacityString, } from "../common/clustersUtils"; import { TRow } from "../../../../../../common/GroupedDataTable/types"; import BooleanCell from "../../../../../../common/GroupedDataTable/cellRenderers/BooleanCell"; @@ -84,27 +85,24 @@ function Thermal() { size: 220, Cell: ({ cell }) => cell.getValue().toFixed(1), }), - columnHelper.accessor("installedCapacity", { - header: "Enabled / Installed (MW)", - size: 220, - aggregationFn: capacityAggregationFn(), - AggregatedCell: ({ cell }) => ( - - {cell.getValue() ?? 
""} - - ), - Cell: ({ row }) => ( - <> - {Math.floor(row.original.enabledCapacity)} /{" "} - {Math.floor(row.original.installedCapacity)} - - ), - Footer: () => ( - - {totalEnabledCapacity} / {totalInstalledCapacity} - - ), - }), + columnHelper.accessor( + (row) => toCapacityString(row.enabledCapacity, row.installedCapacity), + { + header: "Enabled / Installed (MW)", + size: 220, + aggregationFn: capacityAggregationFn(), + AggregatedCell: ({ cell }) => ( + + {cell.getValue()} + + ), + Footer: () => ( + + {toCapacityString(totalEnabledCapacity, totalInstalledCapacity)} + + ), + }, + ), columnHelper.accessor("marketBidCost", { header: "Market Bid (€/MWh)", size: 50, diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/clustersUtils.ts b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/clustersUtils.ts index a035dfa07f..3d5c80dd30 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/clustersUtils.ts +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/common/clustersUtils.ts @@ -2,6 +2,13 @@ import { MRT_AggregationFn } from "material-react-table"; import { ThermalClusterWithCapacity } from "../Thermal/utils"; import { RenewableClusterWithCapacity } from "../Renewables/utils"; +export function toCapacityString( + enabledCapacity: number, + installedCapacity: number, +) { + return `${Math.round(enabledCapacity)} / ${Math.round(installedCapacity)}`; +} + /** * Custom aggregation function summing the values of each row, * to display enabled and installed capacity in the same cell. 
This function is @@ -29,9 +36,7 @@ export const capacityAggregationFn = < { enabledCapacitySum: 0, installedCapacitySum: 0 }, ); - return `${Math.floor(enabledCapacitySum)} / ${Math.floor( - installedCapacitySum, - )}`; + return toCapacityString(enabledCapacitySum, installedCapacitySum); }; }; diff --git a/webapp/src/components/common/Fieldset.tsx b/webapp/src/components/common/Fieldset.tsx index cd72fa8943..442e4078b6 100644 --- a/webapp/src/components/common/Fieldset.tsx +++ b/webapp/src/components/common/Fieldset.tsx @@ -40,6 +40,10 @@ function Fieldset(props: FieldsetProps) { m: 0, }, }, + // Remove padding from the last child of the dialog content + ".MuiDialogContent-root .Form__Content > &:last-child": { + pb: 0, + }, }, sx, )} @@ -59,7 +63,7 @@ function Fieldset(props: FieldsetProps) { )} - + {children} diff --git a/webapp/src/components/common/Form/index.tsx b/webapp/src/components/common/Form/index.tsx index 7c4958bb7f..c14bc783be 100644 --- a/webapp/src/components/common/Form/index.tsx +++ b/webapp/src/components/common/Form/index.tsx @@ -1,5 +1,5 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ -import { FormEvent, useEffect, useMemo, useRef } from "react"; +import { FormEvent, useEffect, useMemo, useRef, useState } from "react"; import { DeepPartial, FieldPath, @@ -116,6 +116,7 @@ function Form( const { t } = useTranslation(); const autoSubmitConfig = toAutoSubmitConfig(autoSubmit); + const [isInProgress, setIsInProgress] = useState(false); const [showAutoSubmitLoader, setShowAutoSubmitLoader] = useDebouncedState( false, 750, @@ -130,7 +131,6 @@ function Form( const lastSubmittedData = useRef(); // eslint-disable-next-line @typescript-eslint/no-empty-function const submitSuccessfulCb = useRef(() => {}); - const preventClose = useRef(false); const contextValue = useMemo( () => ({ isAutoSubmitEnabled: autoSubmitConfig.enable }), @@ -224,7 +224,7 @@ function Form( // Prevent browser close if a submit is pending useEffect(() => { const listener = 
(event: BeforeUnloadEvent) => { - if (preventClose.current) { + if (isInProgress) { // eslint-disable-next-line no-param-reassign event.returnValue = t("form.submit.inProgress"); } else if (isDirty) { @@ -238,14 +238,14 @@ function Form( return () => { window.removeEventListener("beforeunload", listener); }; - }, [t, isDirty]); + }, [t, isInProgress, isDirty]); useUpdateEffect(() => onStateChange?.(formState), [formState]); useEffect(() => setRef(apiRef, formApiPlus)); - usePrompt(t("form.submit.inProgress"), preventClose.current); - usePrompt(t("form.changeNotSaved"), isDirty); + usePrompt(t("form.submit.inProgress"), isInProgress); + usePrompt(t("form.changeNotSaved"), isDirty && !isInProgress); //////////////////////////////////////////////////////////////// // Submit @@ -299,7 +299,7 @@ function Form( }); }) .finally(() => { - preventClose.current = false; + setIsInProgress(false); }); }, onInvalid); @@ -309,7 +309,8 @@ function Form( const submitDebounced = useDebounce(submit, autoSubmitConfig.wait); const requestSubmit = () => { - preventClose.current = true; + setIsInProgress(true); + if (autoSubmitConfig.enable) { submitDebounced(); } else { diff --git a/webapp/src/index.test.tsx b/webapp/src/index.test.tsx new file mode 100644 index 0000000000..6317806c00 --- /dev/null +++ b/webapp/src/index.test.tsx @@ -0,0 +1,19 @@ +import { render } from "@testing-library/react"; +import { Provider } from "react-redux"; +import { StyledEngineProvider } from "@mui/material"; +import App from "./components/App"; +import store from "./redux/store"; + +describe("Application Render", () => { + test("renders the App component with providers", () => { + const { getByText } = render( + + + + + , + ); + + expect(getByText("Antares Web")).toBeInTheDocument(); + }); +}); diff --git a/webapp/src/tests/setup.ts b/webapp/src/tests/setup.ts new file mode 100644 index 0000000000..d1e9ef6bae --- /dev/null +++ b/webapp/src/tests/setup.ts @@ -0,0 +1,17 @@ +import * as matchers from 
"@testing-library/jest-dom/matchers"; +import "@testing-library/jest-dom"; +import { cleanup } from "@testing-library/react"; +import { expect } from "vitest"; + +// Extend Vitest's expect function with jest-dom matchers for enhanced DOM assertions. +expect.extend(matchers); + +afterEach(() => { + cleanup(); +}); + +// Additional setup can include: +// - Mocks: Define global mocks for browser APIs like localStorage, fetch, etc. +// - Global Test Data: Setup common data used across multiple test files. +// - Configuration Settings: Adjust global settings for tests, such as timeouts or environment variables. +// - Cleanup: Implement global afterEach or beforeEach hooks for cleanup and setup between tests. diff --git a/webapp/tsconfig.json b/webapp/tsconfig.json index 1070a7d006..3a20f675ad 100644 --- a/webapp/tsconfig.json +++ b/webapp/tsconfig.json @@ -13,6 +13,7 @@ "isolatedModules": true, "noEmit": true, "jsx": "react-jsx", + "types": ["vitest/globals", "vitest/jsdom"], /* Linting */ "strict": true, diff --git a/webapp/tsconfig.node.json b/webapp/tsconfig.node.json index 42872c59f5..b1b8569880 100644 --- a/webapp/tsconfig.node.json +++ b/webapp/tsconfig.node.json @@ -4,7 +4,8 @@ "skipLibCheck": true, "module": "ESNext", "moduleResolution": "bundler", - "allowSyntheticDefaultImports": true + "allowSyntheticDefaultImports": true, + "types": ["vitest/globals", "vitest/jsdom"] }, "include": ["vite.config.ts"] } diff --git a/webapp/vite.config.ts b/webapp/vite.config.ts index 614e70c546..7cce8d327b 100644 --- a/webapp/vite.config.ts +++ b/webapp/vite.config.ts @@ -1,3 +1,4 @@ +/// import { defineConfig } from "vite"; import react from "@vitejs/plugin-react-swc"; @@ -8,7 +9,7 @@ export default defineConfig(({ mode }) => { const isDesktopMode = mode === "desktop"; return { - // Serve the web app at the `/static` entry point on Desktop mode + // Serve the web app at the `/static` entry point on Desktop mode (cf. 'antarest/main.py') base: isDesktopMode ? 
"/static/" : "/", esbuild: { // Remove logs safely when building production bundle @@ -28,5 +29,11 @@ export default defineConfig(({ mode }) => { }, }, }, + test: { + globals: true, // Use the APIs globally + environment: "jsdom", + css: true, + setupFiles: "./src/tests/setup.ts", + }, }; });