diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index b67a63c435..1338b744f4 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -42,7 +42,7 @@ jobs:
       - name: 🐍 Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: 3.11

       - name: 🐍 Install development dependencies
         run: |
diff --git a/.github/workflows/license_header.yml b/.github/workflows/license_header.yml
index 51b8172825..cf577e51c9 100644
--- a/.github/workflows/license_header.yml
+++ b/.github/workflows/license_header.yml
@@ -13,7 +13,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: 3.11
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 7a590a8214..0e41bf08b1 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -13,7 +13,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: 3.11
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
@@ -46,7 +46,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: 3.11
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
diff --git a/.github/workflows/worker.yml b/.github/workflows/worker.yml
index 4839670d62..bbaa285c78 100644
--- a/.github/workflows/worker.yml
+++ b/.github/workflows/worker.yml
@@ -18,7 +18,7 @@ jobs:
       - name: 🐍 Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: 3.11

       - name: 🐍 Install dependencies
         run: |
diff --git a/Dockerfile b/Dockerfile
index 3cdb34dd06..4004b4f70d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.8-slim-bullseye
+FROM python:3.11-slim-bullseye

 # RUN apt update && apt install -y procps gdb

@@ -16,10 +16,6 @@ COPY ./scripts /scripts
 COPY ./alembic /alembic
 COPY ./alembic.ini /alembic.ini

-RUN ./scripts/install-debug.sh
-
-RUN pip3 install --no-cache-dir --upgrade pip \
-    && pip3 install --no-cache-dir -r /conf/requirements.txt
-
+RUN pip3 install --no-cache-dir --upgrade pip && pip3 install --no-cache-dir -r /conf/requirements.txt

 ENTRYPOINT ["./scripts/start.sh"]
diff --git a/antarest/core/config.py b/antarest/core/config.py
index b88e4f44bb..0ff5082d54 100644
--- a/antarest/core/config.py
+++ b/antarest/core/config.py
@@ -13,7 +13,7 @@
 import multiprocessing
 import tempfile
 from dataclasses import asdict, dataclass, field
-from enum import Enum
+from enum import StrEnum
 from pathlib import Path
 from typing import Dict, List, Optional

@@ -25,7 +25,7 @@
 DEFAULT_WORKSPACE_NAME = "default"


-class Launcher(str, Enum):
+class Launcher(StrEnum):
     SLURM = "slurm"
     LOCAL = "local"
     DEFAULT = "default"
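[Review note] The `(str, Enum)` → `StrEnum` swap above (repeated throughout this patch) is behavior-preserving for comparisons and JSON encoding, but Python 3.11 changed how mixed-in enums stringify, which is the main trap this migration avoids. A minimal, standalone sketch (not project code):

```python
from enum import Enum, StrEnum


class OldLauncher(str, Enum):  # pre-3.11 idiom
    SLURM = "slurm"


class NewLauncher(StrEnum):  # Python 3.11+ replacement
    SLURM = "slurm"


# Both remain str subclasses, so equality checks and JSON encoding are unchanged:
assert OldLauncher.SLURM == "slurm" and NewLauncher.SLURM == "slurm"

# On Python 3.11, str() and f-strings differ between the two idioms:
assert f"{OldLauncher.SLURM}" == "OldLauncher.SLURM"  # member name leaks out
assert f"{NewLauncher.SLURM}" == "slurm"  # plain value, as it was on 3.8
```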
diff --git a/antarest/core/configdata/model.py b/antarest/core/configdata/model.py
index bd243387ec..3a4512e44c 100644
--- a/antarest/core/configdata/model.py
+++ b/antarest/core/configdata/model.py
@@ -10,7 +10,7 @@
 #
 # This file is part of the Antares project.

-from enum import Enum
+from enum import StrEnum
 from typing import Any, Optional

 from sqlalchemy import Column, Integer, String  # type: ignore
@@ -43,6 +43,6 @@ def to_dto(self) -> ConfigDataDTO:


 # APP MAIN CONFIG KEYS
-class ConfigDataAppKeys(str, Enum):
+class ConfigDataAppKeys(StrEnum):
     MAINTENANCE_MODE = "MAINTENANCE_MODE"
     MESSAGE_INFO = "MESSAGE_INFO"
diff --git a/antarest/core/interfaces/eventbus.py b/antarest/core/interfaces/eventbus.py
index c6e36a4b80..30771baff4 100644
--- a/antarest/core/interfaces/eventbus.py
+++ b/antarest/core/interfaces/eventbus.py
@@ -11,14 +11,14 @@
 # This file is part of the Antares project.

 from abc import ABC, abstractmethod
-from enum import Enum
+from enum import StrEnum
 from typing import Any, Awaitable, Callable, List, Optional

 from antarest.core.model import PermissionInfo
 from antarest.core.serialization import AntaresBaseModel


-class EventType(str, Enum):
+class EventType(StrEnum):
     ANY = "_ANY"
     STUDY_CREATED = "STUDY_CREATED"
     STUDY_DELETED = "STUDY_DELETED"
diff --git a/antarest/core/maintenance/model.py b/antarest/core/maintenance/model.py
index c133e4ea9f..c17beadbc3 100644
--- a/antarest/core/maintenance/model.py
+++ b/antarest/core/maintenance/model.py
@@ -10,10 +10,10 @@
 #
 # This file is part of the Antares project.

-from enum import Enum
+from enum import StrEnum


-class MaintenanceMode(str, Enum):
+class MaintenanceMode(StrEnum):
     NORMAL_MODE = "NORMAL"
     MAINTENANCE_MODE = "MAINTENANCE"
diff --git a/antarest/core/model.py b/antarest/core/model.py
index dd4ea511aa..78aa7a1e82 100644
--- a/antarest/core/model.py
+++ b/antarest/core/model.py
@@ -24,7 +24,7 @@
 SUB_JSON = Union[ELEMENT, JSON, List[Any], None]


-class PublicMode(str, enum.Enum):
+class PublicMode(enum.StrEnum):
     NONE = "NONE"
     READ = "READ"
     EXECUTE = "EXECUTE"
@@ -32,7 +32,7 @@ class PublicMode(str, enum.Enum):
     FULL = "FULL"


-class StudyPermissionType(str, enum.Enum):
+class StudyPermissionType(enum.StrEnum):
     """
     User permission belongs to Study
     """
diff --git a/antarest/core/tasks/model.py b/antarest/core/tasks/model.py
index 10f7ef0d34..7da1d201ad 100644
--- a/antarest/core/tasks/model.py
+++ b/antarest/core/tasks/model.py
@@ -13,7 +13,7 @@
 import typing as t
 import uuid
 from datetime import datetime
-from enum import Enum
+from enum import Enum, StrEnum

 from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, Sequence, String  # type: ignore
 from sqlalchemy.engine.base import Engine  # type: ignore
@@ -28,7 +28,7 @@
 from antarest.study.model import Study


-class TaskType(str, Enum):
+class TaskType(StrEnum):
     EXPORT = "EXPORT"
     VARIANT_GENERATION = "VARIANT_GENERATION"
     COPY = "COPY"
diff --git a/antarest/core/utils/archives.py b/antarest/core/utils/archives.py
index a128cd4438..2356653b78 100644
--- a/antarest/core/utils/archives.py
+++ b/antarest/core/utils/archives.py
@@ -15,7 +15,7 @@
 import tempfile
 import typing as t
 import zipfile
-from enum import Enum
+from enum import StrEnum
 from pathlib import Path

 import py7zr
@@ -25,7 +25,7 @@
 logger = logging.getLogger(__name__)


-class ArchiveFormat(str, Enum):
+class ArchiveFormat(StrEnum):
     ZIP = ".zip"
     SEVEN_ZIP = ".7z"
diff --git a/antarest/eventbus/web.py b/antarest/eventbus/web.py
index 7f7050a793..d2d9405235 100644
--- a/antarest/eventbus/web.py
+++ b/antarest/eventbus/web.py
@@ -12,7 +12,7 @@

 import dataclasses
 import logging
-from enum import Enum
+from enum import StrEnum
 from http import HTTPStatus
 from typing import List, Optional

@@ -32,7 +32,7 @@
 logger = logging.getLogger(__name__)

-class WebsocketMessageAction(str, Enum):
+class WebsocketMessageAction(StrEnum):
     SUBSCRIBE = "SUBSCRIBE"
     UNSUBSCRIBE = "UNSUBSCRIBE"
diff --git a/antarest/gui.py b/antarest/gui.py
index 4926e011af..86ddd5efd1 100644
--- a/antarest/gui.py
+++ b/antarest/gui.py
@@ -9,6 +9,18 @@
 # SPDX-License-Identifier: MPL-2.0
 #
 # This file is part of the Antares project.
+
+import os
+import sys
+
+# The PyInstaller version we use has a known issue on Windows, and we need this workaround to fix it.
+# See the issue description and workaround on the PyInstaller website:
+# https://pyinstaller.org/en/stable/common-issues-and-pitfalls.html#sys-stdin-sys-stdout-and-sys-stderr-in-noconsole-windowed-applications-windows-only
+if sys.stdout is None:
+    sys.stdout = open(os.devnull, "w")
+if sys.stderr is None:
+    sys.stderr = open(os.devnull, "w")
+
 import argparse
 import multiprocessing
diff --git a/antarest/launcher/adapters/abstractlauncher.py b/antarest/launcher/adapters/abstractlauncher.py
index 48f18e78e0..ac36176300 100644
--- a/antarest/launcher/adapters/abstractlauncher.py
+++ b/antarest/launcher/adapters/abstractlauncher.py
@@ -116,6 +116,6 @@ def update_log(log_line: str) -> None:
                     channel=EventChannelDirectory.JOB_STATUS + job_id,
                 )
             )
-            self.cache.put(f"Launch_Progress_{job_id}", launch_progress_dto.model_dump())
+            self.cache.put(f"Launch_Progress_{job_id}", launch_progress_dto.model_dump(mode="json"))

         return update_log
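[Review note] The recurring `model_dump()` → `model_dump(mode="json")` change matters because plain `model_dump()` keeps Python objects (enums, datetimes) in the resulting dict, while `mode="json"` coerces them to JSON-safe primitives, which is what cache and event-bus payloads need. A standalone pydantic v2 sketch with an illustrative model (not the project's DTO):

```python
from datetime import datetime
from enum import Enum

from pydantic import BaseModel


class Status(Enum):
    RUNNING = "running"


class ProgressDTO(BaseModel):  # illustrative stand-in for a launch-progress DTO
    status: Status
    updated_at: datetime


dto = ProgressDTO(status=Status.RUNNING, updated_at=datetime(2024, 1, 1))

# "python" mode keeps rich objects; "json" mode flattens them to primitives.
assert isinstance(dto.model_dump()["status"], Status)
assert dto.model_dump(mode="json") == {
    "status": "running",
    "updated_at": "2024-01-01T00:00:00",
}
```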
diff --git a/antarest/launcher/model.py b/antarest/launcher/model.py
index d053a55c85..dbd90e3ef4 100644
--- a/antarest/launcher/model.py
+++ b/antarest/launcher/model.py
@@ -57,7 +57,7 @@ def from_launcher_params(cls, params: t.Optional[str]) -> "LauncherParametersDTO
         return cls.model_validate(from_json(params))


-class LogType(str, enum.Enum):
+class LogType(enum.StrEnum):
     STDOUT = "STDOUT"
     STDERR = "STDERR"

@@ -79,14 +79,14 @@ def to_suffix(self) -> str:
             return "out.log"


-class JobStatus(str, enum.Enum):
+class JobStatus(enum.StrEnum):
     PENDING = "pending"
     FAILED = "failed"
     SUCCESS = "success"
     RUNNING = "running"


-class JobLogType(str, enum.Enum):
+class JobLogType(enum.StrEnum):
     BEFORE = "BEFORE"
     AFTER = "AFTER"

@@ -139,7 +139,7 @@ def json_schema_extra(schema: t.MutableMapping[str, t.Any]) -> None:
             exit_code=0,
             solver_stats="time: 1651s, call_count: 1, optimization_issues: []",
             owner=UserInfo(id=0o007, name="James BOND"),
-        ).model_dump()
+        ).model_dump(mode="json")


 class JobLog(Base):  # type: ignore
diff --git a/antarest/launcher/service.py b/antarest/launcher/service.py
index 573aaf3b9d..e5f78ede3f 100644
--- a/antarest/launcher/service.py
+++ b/antarest/launcher/service.py
@@ -185,7 +185,7 @@ def update(
             self.event_bus.push(
                 Event(
                     type=EventType.STUDY_JOB_COMPLETED if final_status else EventType.STUDY_JOB_STATUS_UPDATE,
-                    payload=job_result.to_dto().model_dump(),
+                    payload=job_result.to_dto().model_dump(mode="json"),
                     permissions=PermissionInfo(public_mode=PublicMode.READ),
                     channel=EventChannelDirectory.JOB_STATUS + job_result.id,
                 )
             )
diff --git a/antarest/matrixstore/service.py b/antarest/matrixstore/service.py
index 3a10ed09df..1d7bc27955 100644
--- a/antarest/matrixstore/service.py
+++ b/antarest/matrixstore/service.py
@@ -101,11 +101,7 @@ def get_matrix_id(self, matrix: t.Union[t.List[t.List[float]], str]) -> str:
         """
         # noinspection SpellCheckingInspection
         if isinstance(matrix, str):
-            # str.removeprefix() is not available in Python 3.8
-            prefix = "matrix://"
-            if matrix.startswith(prefix):
-                return matrix[len(prefix) :]
-            return matrix
+            return matrix.removeprefix("matrix://")
         elif isinstance(matrix, list):
             return self.create(matrix)
         else:
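[Review note] `str.removeprefix()` (available since Python 3.9) is a drop-in replacement for the removed helper: it strips the prefix when present and returns the string untouched otherwise, which is exactly what the old `startswith`/slice code did.

```python
assert "matrix://abc123".removeprefix("matrix://") == "abc123"
assert "abc123".removeprefix("matrix://") == "abc123"  # no prefix: unchanged
```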
diff --git a/antarest/service_creator.py b/antarest/service_creator.py
index fa8086b789..3bee50cc45 100644
--- a/antarest/service_creator.py
+++ b/antarest/service_creator.py
@@ -12,7 +12,7 @@

 import logging
 import typing as t
-from enum import Enum
+from enum import StrEnum
 from pathlib import Path

 import redis
@@ -65,7 +65,7 @@
 """


-class Module(str, Enum):
+class Module(StrEnum):
     APP = "app"
     WATCHER = "watcher"
     MATRIX_GC = "matrix_gc"
diff --git a/antarest/study/business/aggregator_management.py b/antarest/study/business/aggregator_management.py
index a040faf1de..df5bbaa30b 100644
--- a/antarest/study/business/aggregator_management.py
+++ b/antarest/study/business/aggregator_management.py
@@ -12,7 +12,7 @@

 import logging
 import typing as t
-from enum import Enum
+from enum import StrEnum
 from pathlib import Path

 import numpy as np
@@ -54,19 +54,19 @@
 logger = logging.getLogger(__name__)


-class MCRoot(str, Enum):
+class MCRoot(StrEnum):
     MC_IND = "mc-ind"
     MC_ALL = "mc-all"


-class MCIndAreasQueryFile(str, Enum):
+class MCIndAreasQueryFile(StrEnum):
     VALUES = "values"
     DETAILS = "details"
     DETAILS_ST_STORAGE = "details-STstorage"
     DETAILS_RES = "details-res"


-class MCAllAreasQueryFile(str, Enum):
+class MCAllAreasQueryFile(StrEnum):
     VALUES = "values"
     DETAILS = "details"
     DETAILS_ST_STORAGE = "details-STstorage"
@@ -74,11 +74,11 @@ class MCAllAreasQueryFile(str, Enum):
     ID = "id"


-class MCIndLinksQueryFile(str, Enum):
+class MCIndLinksQueryFile(StrEnum):
     VALUES = "values"


-class MCAllLinksQueryFile(str, Enum):
+class MCAllLinksQueryFile(StrEnum):
     VALUES = "values"
     ID = "id"

@@ -308,7 +308,7 @@ def _process_df(self, file_path: Path, is_details: bool) -> pd.DataFrame:
             # loop over the cluster id to extract the values of the actual columns
             for cluster_id, dummy_component in cluster_dummy_product_cols:
                 for actual_col in actual_cols:
-                    col_values = un_normalized_df[(cluster_id, actual_col, dummy_component)].tolist()  # type: ignore
+                    col_values = un_normalized_df[(cluster_id, actual_col, dummy_component)].tolist()
                 new_obj[actual_col] += col_values
                 new_obj[CLUSTER_ID_COL] += [cluster_id for _ in range(df_len)]
                 new_obj[TIME_ID_COL] += list(range(1, df_len + 1))
diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py
index 9712a3daba..d90ec4c554 100644
--- a/antarest/study/business/area_management.py
+++ b/antarest/study/business/area_management.py
@@ -229,11 +229,11 @@ def from_model(
         obj = {
             "average_unsupplied_energy_cost": average_unsupplied_energy_cost,
             "average_spilled_energy_cost": average_spilled_energy_cost,
-            **area_folder.optimization.filtering.model_dump(by_alias=False),
-            **area_folder.optimization.nodal_optimization.model_dump(by_alias=False),
+            **area_folder.optimization.filtering.model_dump(mode="json", by_alias=False),
+            **area_folder.optimization.nodal_optimization.model_dump(mode="json", by_alias=False),
             # adequacy_patch is only available if study version >= 830.
             **(
-                area_folder.adequacy_patch.adequacy_patch.model_dump(by_alias=False)
+                area_folder.adequacy_patch.adequacy_patch.model_dump(mode="json", by_alias=False)
                 if area_folder.adequacy_patch
                 else {}
             ),
@@ -363,7 +363,7 @@ def update_areas_props(
         for area_id, update_area in update_areas_by_ids.items():
             # Update the area properties.
             old_area = old_areas_by_ids[area_id]
-            new_area = old_area.copy(update=update_area.model_dump(by_alias=False, exclude_none=True))
+            new_area = old_area.copy(update=update_area.model_dump(mode="json", by_alias=False, exclude_none=True))
             new_areas_by_ids[area_id] = new_area

             # Convert the DTO to a configuration object and update the configuration file.
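[Review note] The read-modify-write pattern touched here (and in the link/storage/thermal managers below) overlays only the fields the caller actually set onto the stored DTO. A standalone sketch under the same pydantic v2 assumptions; the model names are illustrative, not the project's:

```python
from typing import Optional

from pydantic import BaseModel


class AreaProps(BaseModel):  # illustrative stand-ins, not the project's models
    energy_cost: float = 0.0
    filter_synthesis: str = "hourly"


class AreaPropsUpdate(BaseModel):
    energy_cost: Optional[float] = None
    filter_synthesis: Optional[str] = None


old = AreaProps(energy_cost=1.5, filter_synthesis="daily")
update = AreaPropsUpdate(energy_cost=2.0)  # the caller set only one field

# exclude_none drops unset fields so they don't clobber stored values.
# (.copy(update=...) is the pattern the patch uses; newer pydantic spells it model_copy.)
new = old.copy(update=update.model_dump(mode="json", by_alias=False, exclude_none=True))
assert new.energy_cost == 2.0 and new.filter_synthesis == "daily"
```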
diff --git a/antarest/study/business/areas/renewable_management.py b/antarest/study/business/areas/renewable_management.py
index f102c6a251..b093aa325f 100644
--- a/antarest/study/business/areas/renewable_management.py
+++ b/antarest/study/business/areas/renewable_management.py
@@ -65,7 +65,7 @@ def json_schema_extra(schema: t.MutableMapping[str, t.Any]) -> None:
                 unit_count=100,
                 nominal_capacity=1000.0,
                 ts_interpretation="power-generation",
-            ).model_dump()
+            ).model_dump(mode="json")


 class RenewableClusterCreation(RenewableClusterInput):
diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py
index 7592d50423..8c1a197861 100644
--- a/antarest/study/business/areas/st_storage_management.py
+++ b/antarest/study/business/areas/st_storage_management.py
@@ -68,7 +68,7 @@ def json_schema_extra(schema: t.MutableMapping[str, t.Any]) -> None:
                 efficiency=0.94,
                 initial_level=0.5,
                 initial_level_optim=True,
-            ).model_dump()
+            ).model_dump(mode="json")


 class STStorageCreation(STStorageInput):
@@ -88,7 +88,7 @@ def validate_name(cls, name: t.Optional[str]) -> str:

     # noinspection PyUnusedLocal
     def to_config(self, study_version: StudyVersion) -> STStorageConfigType:
-        values = self.model_dump(by_alias=False, exclude_none=True)
+        values = self.model_dump(mode="json", by_alias=False, exclude_none=True)
         return create_st_storage_config(study_version=study_version, **values)


@@ -111,7 +111,7 @@ def json_schema_extra(schema: t.MutableMapping[str, t.Any]) -> None:
                 reservoir_capacity=600,
                 efficiency=0.94,
                 initial_level_optim=True,
-            ).model_dump()
+            ).model_dump(mode="json")


 # =============
@@ -246,7 +246,7 @@ def create_storage_output(
     config: t.Mapping[str, t.Any],
 ) -> "STStorageOutput":
     obj = create_st_storage_config(study_version=study_version, **config, id=cluster_id)
-    kwargs = obj.model_dump(by_alias=False)
+    kwargs = obj.model_dump(mode="json", by_alias=False)
     return STStorageOutput(**kwargs)


@@ -390,12 +390,15 @@ def update_storages_props(
         for storage_id, update_cluster in update_storages_by_ids.items():
             # Update the storage cluster properties.
             old_cluster = old_storages_by_ids[storage_id]
-            new_cluster = old_cluster.copy(update=update_cluster.model_dump(by_alias=False, exclude_none=True))
+            new_cluster = old_cluster.copy(
+                update=update_cluster.model_dump(mode="json", by_alias=False, exclude_none=True)
+            )
             new_storages_by_areas[area_id][storage_id] = new_cluster

             # Convert the DTO to a configuration object and update the configuration file.
             properties = create_st_storage_config(
-                StudyVersion.parse(study.version), **new_cluster.model_dump(by_alias=False, exclude_none=True)
+                StudyVersion.parse(study.version),
+                **new_cluster.model_dump(mode="json", by_alias=False, exclude_none=True),
             )
             path = _STORAGE_LIST_PATH.format(area_id=area_id, storage_id=storage_id)
             cmd = UpdateConfig(
@@ -469,7 +472,7 @@ def update_storage(
         old_config = create_st_storage_config(study_version, **values)

         # use Python values to synchronize Config and Form values
-        new_values = form.model_dump(by_alias=False, exclude_none=True)
+        new_values = form.model_dump(mode="json", by_alias=False, exclude_none=True)
         new_config = old_config.copy(exclude={"id"}, update=new_values)
         new_data = new_config.model_dump(mode="json", by_alias=True, exclude={"id"})

@@ -489,7 +492,7 @@ def update_storage(
         ]
         execute_or_add_commands(study, file_study, commands, self.storage_service)

-        values = new_config.model_dump(by_alias=False)
+        values = new_config.model_dump(mode="json", by_alias=False)
         return STStorageOutput(**values, id=storage_id)

     def delete_storages(
@@ -552,7 +555,9 @@ def duplicate_cluster(self, study: Study, area_id: str, source_id: str, new_clus
         study_version = StudyVersion.parse(study.version)
         if study_version < STUDY_VERSION_8_8:
             fields_to_exclude.add("enabled")
-        creation_form = STStorageCreation(**current_cluster.model_dump(by_alias=False, exclude=fields_to_exclude))
+        creation_form = STStorageCreation(
+            **current_cluster.model_dump(mode="json", by_alias=False, exclude=fields_to_exclude)
+        )
         new_config = creation_form.to_config(study_version)
         create_cluster_cmd = self._make_create_cluster_cmd(area_id, new_config)

@@ -581,7 +586,7 @@ def duplicate_cluster(self, study: Study, area_id: str, source_id: str, new_clus

         execute_or_add_commands(study, self._get_file_study(study), commands, self.storage_service)

-        return STStorageOutput(**new_config.model_dump(by_alias=False))
+        return STStorageOutput(**new_config.model_dump(mode="json", by_alias=False))

     def get_matrix(
         self,
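[Review note] The `json_schema_extra` hooks in these DTOs build their OpenAPI `example` by dumping a sample instance; with `mode="json"` the example is guaranteed to contain only JSON-serializable values. A sketch of the pattern under the same pydantic v2 assumptions (the field names are illustrative, not the project's schema):

```python
import typing as t

from pydantic import BaseModel


class StorageInput(BaseModel):  # illustrative schema, not the project's
    name: str = "Siemens Battery"
    efficiency: float = 0.94

    class Config:
        @staticmethod
        def json_schema_extra(schema: t.MutableMapping[str, t.Any]) -> None:
            # mode="json" keeps the documented example JSON-safe.
            schema["example"] = StorageInput().model_dump(mode="json")
```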
diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py
index d9679355fd..2786617cc1 100644
--- a/antarest/study/business/areas/thermal_management.py
+++ b/antarest/study/business/areas/thermal_management.py
@@ -72,7 +72,7 @@ def json_schema_extra(schema: t.MutableMapping[str, t.Any]) -> None:
                 nominal_capacity=1000.0,
                 gen_ts="use global",
                 co2=7.0,
-            ).model_dump()
+            ).model_dump(mode="json")


 @camel_case_model
@@ -92,7 +92,7 @@ def validate_name(cls, name: t.Optional[str]) -> str:
         return name

     def to_config(self, study_version: StudyVersion) -> ThermalConfigType:
-        values = self.model_dump(by_alias=False, exclude_none=True)
+        values = self.model_dump(mode="json", by_alias=False, exclude_none=True)
         return create_thermal_config(study_version=study_version, **values)


@@ -115,7 +115,7 @@ def json_schema_extra(schema: t.MutableMapping[str, t.Any]) -> None:
                 nominal_capacity=1000.0,
                 gen_ts="use global",
                 co2=7.0,
-            ).model_dump()
+            ).model_dump(mode="json")


 def create_thermal_output(
@@ -124,7 +124,7 @@ def create_thermal_output(
     config: t.Mapping[str, t.Any],
 ) -> "ThermalClusterOutput":
     obj = create_thermal_config(study_version=study_version, **config, id=cluster_id)
-    kwargs = obj.model_dump(by_alias=False)
+    kwargs = obj.model_dump(mode="json", by_alias=False)
     return ThermalClusterOutput(**kwargs)


@@ -255,12 +255,15 @@ def update_thermals_props(
         for thermal_id, update_cluster in update_thermals_by_ids.items():
             # Update the thermal cluster properties.
             old_cluster = old_thermals_by_ids[thermal_id]
-            new_cluster = old_cluster.copy(update=update_cluster.model_dump(by_alias=False, exclude_none=True))
+            new_cluster = old_cluster.copy(
+                update=update_cluster.model_dump(mode="json", by_alias=False, exclude_none=True)
+            )
             new_thermals_by_areas[area_id][thermal_id] = new_cluster

             # Convert the DTO to a configuration object and update the configuration file.
             properties = create_thermal_config(
-                StudyVersion.parse(study.version), **new_cluster.model_dump(by_alias=False, exclude_none=True)
+                StudyVersion.parse(study.version),
+                **new_cluster.model_dump(mode="json", by_alias=False, exclude_none=True),
             )
             path = _CLUSTER_PATH.format(area_id=area_id, cluster_id=thermal_id)
             cmd = UpdateConfig(
@@ -352,7 +355,7 @@ def update_cluster(
         old_config = create_thermal_config(study_version, **values)

         # Use Python values to synchronize Config and Form values
-        new_values = cluster_data.model_dump(by_alias=False, exclude_none=True)
+        new_values = cluster_data.model_dump(mode="json", by_alias=False, exclude_none=True)
         new_config = old_config.copy(exclude={"id"}, update=new_values)
         new_data = new_config.model_dump(mode="json", by_alias=True, exclude={"id"})

@@ -424,7 +427,7 @@ def duplicate_cluster(
         # Cluster duplication
         source_cluster = self.get_cluster(study, area_id, source_id)
         source_cluster.name = new_cluster_name
-        creation_form = ThermalClusterCreation(**source_cluster.model_dump(by_alias=False, exclude={"id"}))
+        creation_form = ThermalClusterCreation(**source_cluster.model_dump(mode="json", by_alias=False, exclude={"id"}))
         new_config = creation_form.to_config(StudyVersion.parse(study.version))
         create_cluster_cmd = self._make_create_cluster_cmd(area_id, new_config)

@@ -457,7 +460,7 @@ def duplicate_cluster(

         execute_or_add_commands(study, self._get_file_study(study), commands, self.storage_service)

-        return ThermalClusterOutput(**new_config.model_dump(by_alias=False))
+        return ThermalClusterOutput(**new_config.model_dump(mode="json", by_alias=False))

     def validate_series(self, study: Study, area_id: str, cluster_id: str) -> bool:
         lower_cluster_id = cluster_id.lower()
diff --git a/antarest/study/business/enum_ignore_case.py b/antarest/study/business/enum_ignore_case.py
index 2259d229f7..9d0bcf3396 100644
--- a/antarest/study/business/enum_ignore_case.py
+++ b/antarest/study/business/enum_ignore_case.py
@@ -14,7 +14,7 @@
 import typing


-class EnumIgnoreCase(str, enum.Enum):
+class EnumIgnoreCase(enum.StrEnum):
     """
     Case-insensitive enum base class
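[Review note] `EnumIgnoreCase` keeps its case-insensitive lookup while moving to `StrEnum`. The usual way to get that behavior is the `_missing_` hook; a standalone sketch of the idea (the project's actual implementation may differ in its details):

```python
import enum
import typing


class EnumIgnoreCase(enum.StrEnum):
    """StrEnum whose value lookup ignores case."""

    @classmethod
    def _missing_(cls, value: object) -> typing.Optional["EnumIgnoreCase"]:
        if isinstance(value, str):
            for member in cls:
                if member.value.lower() == value.lower():
                    return member
        return None  # fall through to the standard ValueError


class Frequency(EnumIgnoreCase):
    HOURLY = "hourly"


assert Frequency("HOURLY") is Frequency.HOURLY  # lookup succeeds despite the case
```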
diff --git a/antarest/study/business/link_management.py b/antarest/study/business/link_management.py
index f14c43ef07..54831ad8ac 100644
--- a/antarest/study/business/link_management.py
+++ b/antarest/study/business/link_management.py
@@ -121,7 +121,7 @@ def get_all_links_props(self, study: RawStudy) -> t.Mapping[t.Tuple[str, str], L
             for area2_id, properties_cfg in property_map.items():
                 area1_id, area2_id = sorted([area1_id, area2_id])
                 properties = LinkProperties(**properties_cfg)
-                links_by_ids[(area1_id, area2_id)] = LinkOutput(**properties.model_dump(by_alias=False))
+                links_by_ids[(area1_id, area2_id)] = LinkOutput(**properties.model_dump(mode="json", by_alias=False))

         return links_by_ids

@@ -137,7 +137,9 @@ def update_links_props(
         for (area1, area2), update_link_dto in update_links_by_ids.items():
             # Update the link properties.
             old_link_dto = old_links_by_ids[(area1, area2)]
-            new_link_dto = old_link_dto.copy(update=update_link_dto.model_dump(by_alias=False, exclude_none=True))
+            new_link_dto = old_link_dto.copy(
+                update=update_link_dto.model_dump(mode="json", by_alias=False, exclude_none=True)
+            )
             new_links_by_ids[(area1, area2)] = new_link_dto

             # Convert the DTO to a configuration object and update the configuration file.
diff --git a/antarest/study/business/scenario_builder_management.py b/antarest/study/business/scenario_builder_management.py
index c8a38b28c3..ac5a6a2bb6 100644
--- a/antarest/study/business/scenario_builder_management.py
+++ b/antarest/study/business/scenario_builder_management.py
@@ -37,7 +37,7 @@
 Rulesets: te.TypeAlias = t.MutableMapping[str, Ruleset]


-class ScenarioType(str, enum.Enum):
+class ScenarioType(enum.StrEnum):
     """
     Scenario type
diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py
index 342c1c5abb..b108c57cb8 100644
--- a/antarest/study/business/table_mode_management.py
+++ b/antarest/study/business/table_mode_management.py
@@ -95,11 +95,11 @@ def __init__(
     def _get_table_data_unsafe(self, study: RawStudy, table_type: TableModeType) -> TableDataDTO:
         if table_type == TableModeType.AREA:
             areas_map = self._area_manager.get_all_area_props(study)
-            data = {area_id: area.model_dump(by_alias=True) for area_id, area in areas_map.items()}
+            data = {area_id: area.model_dump(mode="json", by_alias=True) for area_id, area in areas_map.items()}
         elif table_type == TableModeType.LINK:
             links_map = self._link_manager.get_all_links_props(study)
             data = {
-                f"{area1_id} / {area2_id}": link.model_dump(by_alias=True)
+                f"{area1_id} / {area2_id}": link.model_dump(mode="json", by_alias=True)
                 for (area1_id, area2_id), link in links_map.items()
             }
         elif table_type == TableModeType.THERMAL:
diff --git a/antarest/study/business/thematic_trimming_management.py b/antarest/study/business/thematic_trimming_management.py
index 96fd8aa106..d0ada30b65 100644
--- a/antarest/study/business/thematic_trimming_management.py
+++ b/antarest/study/business/thematic_trimming_management.py
@@ -51,7 +51,7 @@ def set_field_values(self, study: Study, field_values: ThematicTrimmingFormField
         Set Thematic Trimming config from the webapp form
         """
         file_study = self.storage_service.get_storage(study).get_raw(study)
-        field_values_dict = field_values.model_dump()
+        field_values_dict = field_values.model_dump(mode="json")

         keys_by_bool: t.Dict[bool, t.List[t.Any]] = {True: [], False: []}
         fields_info = get_fields_info(StudyVersion.parse(study.version))
diff --git a/antarest/study/business/timeseries_config_management.py b/antarest/study/business/timeseries_config_management.py
index da28a5e468..8b01bfe24e 100644
--- a/antarest/study/business/timeseries_config_management.py
+++ b/antarest/study/business/timeseries_config_management.py
@@ -133,7 +133,7 @@ def __set_field_values_for_type(
         field_values: TSFormFieldsForType,
     ) -> None:
         commands: t.List[UpdateConfig] = []
-        values = field_values.model_dump()
+        values = field_values.model_dump(mode="json")

         for field, path in PATH_BY_TS_STR_FIELD.items():
             field_val = values[field]
diff --git a/antarest/study/business/xpansion_management.py b/antarest/study/business/xpansion_management.py
index 318adde367..02e1fc795c 100644
--- a/antarest/study/business/xpansion_management.py
+++ b/antarest/study/business/xpansion_management.py
@@ -342,10 +342,12 @@ def create_xpansion_configuration(self, study: Study, zipped_config: t.Optional[
             xpansion_settings = XpansionSettings()
             settings_obj = xpansion_settings.model_dump(
-                by_alias=True, exclude_none=True, exclude={"sensitivity_config"}
+                mode="json", by_alias=True, exclude_none=True, exclude={"sensitivity_config"}
             )
             if xpansion_settings.sensitivity_config:
-                sensitivity_obj = xpansion_settings.sensitivity_config.model_dump(by_alias=True, exclude_none=True)
+                sensitivity_obj = xpansion_settings.sensitivity_config.model_dump(
+                    mode="json", by_alias=True, exclude_none=True
+                )
             else:
                 sensitivity_obj = {}

@@ -386,7 +388,7 @@ def update_xpansion_settings(
         actual_settings = self.get_xpansion_settings(study)
         settings_fields = new_xpansion_settings.model_dump(
-            by_alias=False, exclude_none=True, exclude={"sensitivity_config"}
+            mode="json", by_alias=False, exclude_none=True, exclude={"sensitivity_config"}
         )
         updated_settings = actual_settings.copy(deep=True, update=settings_fields)

@@ -407,11 +409,11 @@ def update_xpansion_settings(
             msg = f"Additional constraints file '{constraints_file}' does not exist"
             raise XpansionFileNotFoundError(msg) from None

-        config_obj = updated_settings.model_dump(by_alias=True, exclude={"sensitivity_config"})
+        config_obj = updated_settings.model_dump(mode="json", by_alias=True, exclude={"sensitivity_config"})
         file_study.tree.save(config_obj, ["user", "expansion", "settings"])

         if new_xpansion_settings.sensitivity_config:
-            sensitivity_obj = new_xpansion_settings.sensitivity_config.model_dump(by_alias=True)
+            sensitivity_obj = new_xpansion_settings.sensitivity_config.model_dump(mode="json", by_alias=True)
             file_study.tree.save(sensitivity_obj, ["user", "expansion", "sensitivity", "sensitivity_in"])

         return self.get_xpansion_settings(study)
@@ -551,7 +553,7 @@ def add_candidate(self, study: Study, xpansion_candidate: XpansionCandidateDTO)
         )  # The primary key is actually the name, the id does not matter and is never checked.

         logger.info(f"Adding candidate '{xpansion_candidate.name}' to study '{study.id}'")
-        candidates_obj[next_id] = xpansion_candidate.model_dump(by_alias=True, exclude_none=True)
+        candidates_obj[next_id] = xpansion_candidate.model_dump(mode="json", by_alias=True, exclude_none=True)
         candidates_data = {"user": {"expansion": {"candidates": candidates_obj}}}
         file_study.tree.save(candidates_data)
         # Should we add a field in the study config containing the xpansion candidates like the links or the areas ?
@@ -592,7 +594,9 @@ def update_candidate(
         for candidate_id, candidate in candidates.items():
             if candidate["name"] == candidate_name:
                 logger.info(f"Updating candidate '{candidate_name}' of study '{study.id}'")
-                candidates[candidate_id] = xpansion_candidate_dto.model_dump(by_alias=True, exclude_none=True)
+                candidates[candidate_id] = xpansion_candidate_dto.model_dump(
+                    mode="json", by_alias=True, exclude_none=True
+                )
                 file_study.tree.save(candidates, ["user", "expansion", "candidates"])
                 return
         raise CandidateNotFoundError(f"The candidate '{xpansion_candidate_dto.name}' does not exist")
diff --git a/antarest/study/model.py b/antarest/study/model.py
index 2308c99f99..e511185c25 100644
--- a/antarest/study/model.py
+++ b/antarest/study/model.py
@@ -437,13 +437,13 @@ class StudySimResultDTO(AntaresBaseModel):
     archived: bool


-class StudyDownloadType(str, enum.Enum):
+class StudyDownloadType(enum.StrEnum):
     LINK = "LINK"
     DISTRICT = "DISTRICT"
     AREA = "AREA"


-class StudyDownloadLevelDTO(str, enum.Enum):
+class StudyDownloadLevelDTO(enum.StrEnum):
     ANNUAL = "annual"
     MONTHLY = "monthly"
     WEEKLY = "weekly"
@@ -468,7 +468,7 @@ def inc_date(self, date: datetime) -> datetime:
         raise ShouldNotHappenException()


-class ExportFormat(str, enum.Enum):
+class ExportFormat(enum.StrEnum):
     ZIP = "application/zip"
     TAR_GZ = "application/tar+gz"
     JSON = "application/json"
diff --git a/antarest/study/repository.py b/antarest/study/repository.py
index a485b24652..6ecad45df7 100644
--- a/antarest/study/repository.py
+++ b/antarest/study/repository.py
@@ -119,7 +119,7 @@ class StudyFilter(AntaresBaseModel, frozen=True, extra="forbid"):
     access_permissions: AccessPermissions = AccessPermissions()


-class StudySortBy(str, enum.Enum):
+class StudySortBy(enum.StrEnum):
     """How to sort the results of studies query results"""

     NAME_ASC = "+name"
diff --git a/antarest/study/service.py b/antarest/study/service.py
index 4f86bdd781..1d1c2be1c6 100644
--- a/antarest/study/service.py
+++ b/antarest/study/service.py
@@ -1347,7 +1347,7 @@ def export_task(_notifier: TaskUpdateNotifier) -> TaskResult:
             return FileResponse(tmp_export_file, headers=headers, media_type=filetype)

         else:
-            json_response = to_json(matrix.model_dump())
+            json_response = to_json(matrix.model_dump(mode="json"))
             return Response(content=json_response, media_type="application/json")

     def get_study_sim_result(self, study_id: str, params: RequestParameters) -> t.List[StudySimResultDTO]:
@@ -2457,7 +2457,7 @@ def unarchive_output_task(
                     src=str(src),
                     dest=str(dest),
                     remove_src=not keep_src_zip,
-                ).model_dump(),
+                ).model_dump(mode="json"),
                 name=task_name,
                 ref_id=study.id,
                 request_params=params,
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/ruleset_matrices.py b/antarest/study/storage/rawstudy/model/filesystem/config/ruleset_matrices.py
index 29da19c30e..6ed47a3e17 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/ruleset_matrices.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/ruleset_matrices.py
@@ -397,11 +397,11 @@ def update_table_form(self, table_form: TableForm, scenario_type: str, *, nan_va
         scenario = self.scenarios[scenario_type]
         if isinstance(scenario, pd.DataFrame):
             simple_table_form = t.cast(SimpleTableForm, table_form)
-            df = pd.DataFrame(simple_table_form).transpose().replace([None, nan_value], np.nan)
-            scenario.at[df.index, df.columns] = df
+            df = pd.DataFrame.from_dict(simple_table_form, orient="index").replace([None, nan_value], np.nan)
+            scenario.loc[df.index, df.columns] = df
         else:
             cluster_table_form = t.cast(ClusterTableForm, table_form)
             for area, simple_table_form in cluster_table_form.items():
                 scenario = t.cast(pd.DataFrame, self.scenarios[scenario_type][area])
                 df = pd.DataFrame(simple_table_form).transpose().replace([None, nan_value], np.nan)
-                scenario.at[df.index, df.columns] = df
+                scenario.loc[df.index, df.columns] = df
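[Review note] The `scenario.at[...]` calls were a latent bug: `.at` addresses a single scalar cell and does not accept arrays of labels, while `.loc` aligns on labels and assigns a whole block. A standalone illustration:

```python
import numpy as np
import pandas as pd

scenario = pd.DataFrame(np.zeros((3, 2)), index=["a", "b", "c"], columns=["x", "y"])
patch = pd.DataFrame({"x": [1.0], "y": [2.0]}, index=["b"])

# .loc accepts label arrays and assigns the aligned block in one statement;
# .at is a scalar accessor and rejects this kind of indexer.
scenario.loc[patch.index, patch.columns] = patch
assert scenario.loc["b", "x"] == 1.0 and scenario.loc["a", "x"] == 0.0
```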
diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py
index cdc67b8ac5..2b8c431085 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py
@@ -10,6 +10,8 @@
 #
 # This file is part of the Antares project.

+from __future__ import annotations
+
 import re
 from abc import ABC, abstractmethod
 from typing import Hashable, List, Sequence, Tuple, cast
@@ -43,7 +45,7 @@ def __init__(self, area: str):
         self.area = area

     @abstractmethod
-    def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
+    def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index[str], pd.DataFrame]:
         """
         Extract date from raw columns inside matrix file
         Args:
@@ -54,7 +56,7 @@ def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
         raise NotImplementedError()

     @abstractmethod
-    def build_date(self, index: pd.Index) -> pd.DataFrame:
+    def build_date(self, index: pd.Index[str]) -> pd.DataFrame:
         """
         Format in antares style date index
         Args:
@@ -78,7 +80,7 @@ class HourlyMatrixSerializer(IDateMatrixSerializer):
     Class implementation for hourly index
     """

-    def build_date(self, index: pd.Index) -> pd.DataFrame:
+    def build_date(self, index: pd.Index[str]) -> pd.DataFrame:
         def _map(row: str) -> Tuple[str, int, str, str, str]:
             m, d, h = re.split(r"[\s/]", row)
             return "", 1, d, IDateMatrixSerializer._R_MONTHS[m], h
@@ -97,7 +99,7 @@ def _map(row: str) -> Tuple[str, int, str, str, str]:

         return pd.concat([headers, matrix], axis=0)

-    def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
+    def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index[str], pd.DataFrame]:
         # Extract left part with date
         df_date = df.iloc[:, 2:5]
         df_date.columns = pd.Index(data=["day", "month", "hour"])
@@ -108,7 +110,7 @@ def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
         to_remove = cast(Sequence[Hashable], df.columns[0:5])
         body = df.drop(to_remove, axis=1)

-        return pd.Index(date), body
+        return pd.Index(date), body  # type: ignore


 class DailyMatrixSerializer(IDateMatrixSerializer):
@@ -116,7 +118,7 @@ class DailyMatrixSerializer(IDateMatrixSerializer):
     Class implementation for daily index
     """

-    def build_date(self, index: pd.Index) -> pd.DataFrame:
+    def build_date(self, index: pd.Index[str]) -> pd.DataFrame:
         def _map(row: str) -> Tuple[str, int, str, str]:
             m, d = row.split("/")
             return "", 1, d, IDateMatrixSerializer._R_MONTHS[m]
@@ -135,7 +137,7 @@ def _map(row: str) -> Tuple[str, int, str, str]:

         return pd.concat([headers, matrix], axis=0)

-    def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
+    def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index[str], pd.DataFrame]:
         # Extract left part with date
         df_date = df.iloc[:, 2:4]
         df_date.columns = pd.Index(["day", "month"])
@@ -146,7 +148,7 @@ def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
         to_remove = cast(Sequence[Hashable], df.columns[0:4])
         body = df.drop(to_remove, axis=1)

-        return pd.Index(date), body
+        return pd.Index(date), body  # type: ignore


 class WeeklyMatrixSerializer(IDateMatrixSerializer):
@@ -154,7 +156,7 @@ class WeeklyMatrixSerializer(IDateMatrixSerializer):
     Class implementation for weekly index
     """

-    def build_date(self, index: pd.Index) -> pd.DataFrame:
+    def build_date(self, index: pd.Index[str]) -> pd.DataFrame:
         matrix = pd.DataFrame({0: [""] * index.size, 1: index.values})

         headers = pd.DataFrame(
@@ -167,7 +169,7 @@ def build_date(self, index: pd.Index) -> pd.DataFrame:

         return pd.concat([headers, matrix], axis=0)

-    def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
+    def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index[str], pd.DataFrame]:
         # Extract left part with date
         df_date = df.iloc[:, 1:2]
         df_date.columns = pd.Index(["weekly"])
@@ -184,7 +186,7 @@ class MonthlyMatrixSerializer(IDateMatrixSerializer):
     Class implementation for monthly index
     """

-    def build_date(self, index: pd.Index) -> pd.DataFrame:
+    def build_date(self, index: pd.Index[str]) -> pd.DataFrame:
         matrix = pd.DataFrame(
             {
                 0: [""] * index.size,
@@ -203,7 +205,7 @@ def build_date(self, index: pd.Index) -> pd.DataFrame:

         return pd.concat([headers, matrix], axis=0)

-    def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
+    def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index[str], pd.DataFrame]:
         # Extract left part with date
         df_date = df.iloc[:, 2:3]
         df_date.columns = pd.Index(["month"])
@@ -222,7 +224,7 @@ class AnnualMatrixSerializer(IDateMatrixSerializer):
     Class implementation for annual index
     """

-    def build_date(self, index: pd.Index) -> pd.DataFrame:
+    def build_date(self, index: pd.Index[str]) -> pd.DataFrame:
         return pd.DataFrame(
             [
                 [self.area.upper(), "annual"],
@@ -232,7 +234,7 @@ def build_date(self, index: pd.Index) -> pd.DataFrame:
             ]
         )

-    def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index, pd.DataFrame]:
+    def extract_date(self, df: pd.DataFrame) -> Tuple[pd.Index[str], pd.DataFrame]:
         # Extract left part with date
         df_date = df.iloc[:, 1:2]
         df_date.columns = pd.Index(["annual"])
diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/matrix.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/matrix.py
index 9c421f59e1..427631427a 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/matrix/matrix.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/matrix.py
@@ -12,7 +12,7 @@

 import logging
 from abc import ABC, abstractmethod
-from enum import Enum
+from enum import StrEnum
 from pathlib import Path
 from typing import Any, List, Optional, Union, cast

@@ -27,7 +27,7 @@
 logger = logging.getLogger(__name__)


-class MatrixFrequency(str, Enum):
+class MatrixFrequency(StrEnum):
     """
     An enumeration of matrix frequencies.
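[Review note] `pd.Index[str]` is a typing-only parameterization understood by pandas-stubs 2.x; the new `from __future__ import annotations` keeps those subscripts out of the runtime path, so the module imports cleanly whether or not the installed pandas supports subscripting `Index`. A standalone sketch:

```python
from __future__ import annotations  # annotations stay unevaluated at runtime

import pandas as pd


def build_date(index: pd.Index[str]) -> pd.DataFrame:
    # At runtime this is an ordinary pd.Index; the [str] only informs mypy/pandas-stubs.
    return pd.DataFrame({"label": index.values})


print(build_date(pd.Index(["01/01", "01/02"])))
```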
diff --git a/antarest/study/storage/variantstudy/model/command/icommand.py b/antarest/study/storage/variantstudy/model/command/icommand.py
index eb9a1f1285..537b5ddb5b 100644
--- a/antarest/study/storage/variantstudy/model/command/icommand.py
+++ b/antarest/study/storage/variantstudy/model/command/icommand.py
@@ -139,8 +139,8 @@ def match(self, other: "ICommand", equal: bool = False) -> bool:
         if not isinstance(other, self.__class__):
             return False
         excluded_fields = set(ICommand.model_fields)
-        this_values = self.model_dump(exclude=excluded_fields)
-        that_values = other.model_dump(exclude=excluded_fields)
+        this_values = self.model_dump(mode="json", exclude=excluded_fields)
+        that_values = other.model_dump(mode="json", exclude=excluded_fields)
         return this_values == that_values

     @abstractmethod
diff --git a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py
index bc582d0036..423c431d38 100644
--- a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py
+++ b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py
@@ -168,7 +168,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
         study_version = study_data.config.version
         # rename matrices if the operator has changed for version >= 870
         if self.operator and study_version >= STUDY_VERSION_8_7:
-            existing_operator = BindingConstraintOperator(actual_cfg.get("operator"))
+            existing_operator = BindingConstraintOperator(actual_cfg["operator"])
             new_operator = self.operator
             update_matrices_names(study_data, self.id, existing_operator, new_operator)

@@ -178,7 +178,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
             term for term in [m.value for m in TermMatrices] if hasattr(self, term) and getattr(self, term)
         ]

-        time_step = self.time_step or BindingConstraintFrequency(actual_cfg.get("type"))
+        time_step = self.time_step or BindingConstraintFrequency(actual_cfg["type"])
         self.validates_and_fills_matrices(
             time_step=time_step, specific_matrices=updated_matrices or None, version=study_version, create=False
         )
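[Review note] Switching `actual_cfg.get(...)` to `actual_cfg[...]` makes a missing key fail fast with a `KeyError` naming the key, instead of feeding `None` into the enum constructor, which only fails one step later with a less useful `ValueError`. A standalone illustration:

```python
from enum import StrEnum


class Operator(StrEnum):
    LESS = "less"


cfg: dict = {}  # a config record unexpectedly missing its "operator" key

try:
    Operator(cfg.get("operator"))  # old style: the error surfaces one step too late
except ValueError as exc:
    print(exc)  # "None is not a valid Operator"

try:
    Operator(cfg["operator"])  # new style: the lookup itself pinpoints the problem
except KeyError as exc:
    print(exc)  # 'operator'
```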
diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py
index e5c3166261..afd7f119c0 100644
--- a/antarest/study/web/study_data_blueprint.py
+++ b/antarest/study/web/study_data_blueprint.py
@@ -94,7 +94,7 @@ class BCKeyValueType(te.TypedDict):
     value: t.Union[str, int, float, bool]


-class ClusterType(str, enum.Enum):
+class ClusterType(enum.StrEnum):
     """
     Cluster type:
diff --git a/docs/install/0-INSTALL.md b/docs/install/0-INSTALL.md
index 388f98b5f7..8206af7c6e 100644
--- a/docs/install/0-INSTALL.md
+++ b/docs/install/0-INSTALL.md
@@ -8,7 +8,7 @@ A local build allows using Antares Web as a desktop application.

 Requirements:

-- python : 3.8.x
+- python : 3.11.x
 - node : 18.16.1

 Then perform the following steps:
diff --git a/pyproject.toml b/pyproject.toml
index 42c259d133..cfde64c238 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,12 +8,9 @@ authors = [{name="RTE, Antares Web Team", email="andrea.sgattoni@rte-france.com"
 description="Antares Server"
 readme = {file = "README.md", content-type = "text/markdown"}
 license = {file = "LICENSE"}
-requires-python = ">=3.8"
+requires-python = ">=3.11"
 classifiers=[
     "Programming Language :: Python :: 3 :: Only",
-    "Programming Language :: Python :: 3.8",
-    "Programming Language :: Python :: 3.9",
-    "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "License :: Apache License :: 2.0",
     "Operating System :: OS Independent",
@@ -126,8 +123,8 @@ exclude = [
 line-length = 120
 indent-width = 4

-# Assume Python 3.8
-target-version = "py38"
+# Assumes Python 3.11
+target-version = "py311"

 [tool.ruff.lint]
 # Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
diff --git a/readthedocs.yml b/readthedocs.yml
index bf990c8405..d0135160a8 100644
--- a/readthedocs.yml
+++ b/readthedocs.yml
@@ -8,7 +8,7 @@ version: 2
 build:
   os: ubuntu-22.04
   tools:
-    python: '3.8'
+    python: '3.11'

 mkdocs:
   configuration: mkdocs.yml
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 462d77bb3c..14c06c1ed6 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -4,16 +4,16 @@
 black~=23.7.0
 isort~=5.12.0
 mypy~=1.11.1
-pyinstaller==5.6.2
-pyinstaller-hooks-contrib==2024.6
+pyinstaller==6.10.0
+pyinstaller-hooks-contrib==2024.8

 # Extra requirements installed by `mypy --install-types`.
 # IMPORTANT: Make sure the versions of these typing libraries match the versions
 # of the corresponding implementation libraries used in production (in `requirements.txt`).
-pandas-stubs~=1.4.0
+pandas-stubs~=2.2.2
 types-paramiko~=3.4.0
-types-psycopg2~=2.9.4
+types-psycopg2~=2.9.9
 types-redis~=4.1.2
 types-requests~=2.27.1
 types-PyYAML~=5.4.1
diff --git a/requirements.txt b/requirements.txt
index eefa07d92f..2e191bb01f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -33,18 +33,16 @@
 humanize~=4.11.0; python_version > '3.8'
 jsonref~=0.2
 PyJWT~=2.9.0
 MarkupSafe~=2.0.1
-numpy~=1.22.1
-pandas~=1.4.0
+numpy~=1.26.4
+pandas~=2.2.3
 paramiko~=3.4.1
 plyer~=2.0.0
-psycopg2-binary==2.9.4
+psycopg2-binary~=2.9.9
 py7zr~=0.20.6
 python-json-logger~=2.0.7
-PyYAML~=5.4.1; python_version <= '3.9'
-PyYAML~=5.3.1; python_version > '3.9'
+PyYAML~=5.3.1
 redis~=4.1.2
 SQLAlchemy~=1.4.46
-tables==3.6.1; python_version <= '3.8'
-tables==3.9.2; python_version > '3.8'
+tables==3.9.2
 typing_extensions~=4.12.2
 xlsxwriter~=3.2.0
\ No newline at end of file
diff --git a/scripts/install-debug.sh b/scripts/install-debug.sh
deleted file mode 100755
index 2ada4a30c4..0000000000
--- a/scripts/install-debug.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-
-set -e
-
-python3 -m venv /debug_env
-source /debug_env/bin/activate
-pip3 install pystack-debugger
-deactivate
\ No newline at end of file
diff --git a/scripts/stacktrace.sh b/scripts/stacktrace.sh
deleted file mode 100755
index 4ce721ad78..0000000000
--- a/scripts/stacktrace.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-source /debug_env/bin/activate
-echo "Printing stacktraces"
-ps aux | grep python | awk '{print $2}' | xargs -I{} bash -c "echo {}; pystack --include-greenlet {}"
-echo "Done"
\ No newline at end of file
diff --git a/sonar-project.properties b/sonar-project.properties
index fa40909147..dd7384df7d 100644
--- a/sonar-project.properties
+++ b/sonar-project.properties
@@ -4,7 +4,7 @@ sonar.sources=antarest, webapp/src
 sonar.language=python, js
 sonar.exclusions=antarest/gui.py,antarest/main.py
 sonar.python.coverage.reportPaths=coverage.xml
-sonar.python.version=3.8
+sonar.python.version=3.11
 sonar.javascript.lcov.reportPaths=webapp/coverage/lcov.info
 sonar.projectVersion=2.17.6
 sonar.coverage.exclusions=antarest/gui.py,antarest/main.py,antarest/singleton_services.py,antarest/worker/archive_worker_service.py,webapp/**/*,,antarest/fastapi_jwt_auth/**
\ No newline at end of file
diff --git a/tests/cache/test_local_cache.py b/tests/cache/test_local_cache.py
index 3242d92533..dc6382cac5 100644
--- a/tests/cache/test_local_cache.py
+++ b/tests/cache/test_local_cache.py
@@ -43,11 +43,11 @@ def test_lifecycle():
     id = "some_id"
     duration = 3600
     timeout = int(time.time()) + duration
-    cache_element = LocalCacheElement(duration=duration, data=config.model_dump(), timeout=timeout)
+    cache_element = LocalCacheElement(duration=duration, data=config.model_dump(mode="json"), timeout=timeout)

     # PUT
-    cache.put(id=id, data=config.model_dump(), duration=duration)
+    cache.put(id=id, data=config.model_dump(mode="json"), duration=duration)
     assert cache.cache[id] == cache_element

     # GET
-    assert cache.get(id=id) == config.model_dump()
+    assert cache.get(id=id) == config.model_dump(mode="json")
diff --git a/tests/cache/test_redis_cache.py b/tests/cache/test_redis_cache.py
index f5fe9c37a3..808131fabb 100644
--- a/tests/cache/test_redis_cache.py
+++ b/tests/cache/test_redis_cache.py
@@ -42,7 +42,7 @@ def test_lifecycle():
     id = "some_id"
     redis_key = f"cache:{id}"
     duration = 3600
-    cache_element = RedisCacheElement(duration=duration, data=config.model_dump()).model_dump_json()
+    cache_element = RedisCacheElement(duration=duration, data=config.model_dump(mode="json")).model_dump_json()

     # GET
     redis_client.get.return_value = cache_element
@@ -53,7 +53,7 @@ def test_lifecycle():

     # PUT
     duration = 7200
-    cache_element = RedisCacheElement(duration=duration, data=config.model_dump()).model_dump_json()
-    cache.put(id=id, data=config.model_dump(), duration=duration)
+    cache_element = RedisCacheElement(duration=duration, data=config.model_dump(mode="json")).model_dump_json()
+    cache.put(id=id, data=config.model_dump(mode="json"), duration=duration)
     redis_client.set.assert_called_once_with(redis_key, cache_element)
     redis_client.expire.assert_called_with(redis_key, duration)
diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py
index da7b312e9c..a9fa59088f 100644
--- a/tests/integration/test_integration.py
+++ b/tests/integration/test_integration.py
@@ -606,15 +606,15 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None:

     res = client.get(f"/v1/studies/{study_id}/layers")
     res.raise_for_status()
-    assert res.json() == [LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).model_dump()]
+    assert res.json() == [LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).model_dump(mode="json")]

     res = client.post(f"/v1/studies/{study_id}/layers?name=test")
     assert res.json() == "1"

     res = client.get(f"/v1/studies/{study_id}/layers")
     assert res.json() == [
-        LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).model_dump(),
-        LayerInfoDTO(id="1", name="test", areas=[]).model_dump(),
+        LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).model_dump(mode="json"),
+        LayerInfoDTO(id="1", name="test", areas=[]).model_dump(mode="json"),
     ]

     res = client.put(f"/v1/studies/{study_id}/layers/1?name=test2")
@@ -625,8 +625,8 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None:
     assert res.status_code in {200, 201}, res.json()
     res = client.get(f"/v1/studies/{study_id}/layers")
     assert res.json() == [
-        LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).model_dump(),
-        LayerInfoDTO(id="1", name="test2", areas=["area 2"]).model_dump(),
+        LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).model_dump(mode="json"),
+        LayerInfoDTO(id="1", name="test2", areas=["area 2"]).model_dump(mode="json"),
     ]

     # Delete the layer '1' that has 1 area
diff --git a/tests/variantstudy/conftest.py b/tests/variantstudy/conftest.py
index beaaf34065..6d3039d4f9 100644
--- a/tests/variantstudy/conftest.py
+++ b/tests/variantstudy/conftest.py
@@ -94,11 +94,7 @@ def get_matrix_id(matrix: t.Union[t.List[t.List[float]], str]) -> str:
         Get the matrix ID from a matrix or a matrix link.
""" if isinstance(matrix, str): - # str.removeprefix() is not available in Python 3.8 - prefix = "matrix://" - if matrix.startswith(prefix): - return matrix[len(prefix) :] - return matrix + return matrix.removeprefix("matrix://") elif isinstance(matrix, list): return create(matrix) else: diff --git a/tests/variantstudy/model/command/test_create_area.py b/tests/variantstudy/model/command/test_create_area.py index 0bb8c104c7..d6593a929b 100644 --- a/tests/variantstudy/model/command/test_create_area.py +++ b/tests/variantstudy/model/command/test_create_area.py @@ -15,6 +15,7 @@ import pytest +from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.rawstudy.model.filesystem.config.model import EnrModelling, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.business.command_reverter import CommandReverter @@ -96,9 +97,9 @@ def test_apply( # Allocation assert (study_path / "input" / "hydro" / "allocation" / f"{area_id}.ini").exists() - allocation = configparser.ConfigParser() - allocation.read(study_path / "input" / "hydro" / "allocation" / f"{area_id}.ini") - assert int(allocation["[allocation"][area_id]) == 1 + reader = IniReader() + allocation = reader.read(study_path / "input" / "hydro" / "allocation" / f"{area_id}.ini") + assert int(allocation["[allocation]"][area_id]) == 1 # Capacity assert (study_path / "input" / "hydro" / "common" / "capacity" / f"maxpower_{area_id}.txt.link").exists() diff --git a/tests/variantstudy/test_command_factory.py b/tests/variantstudy/test_command_factory.py index b78ba393e5..dd47e0453c 100644 --- a/tests/variantstudy/test_command_factory.py +++ b/tests/variantstudy/test_command_factory.py @@ -386,11 +386,7 @@ @pytest.fixture def command_factory() -> CommandFactory: def get_matrix_id(matrix: str) -> str: - # str.removeprefix() is not available in Python 3.8 - prefix = "matrix://" - if matrix.startswith(prefix): - return matrix[len(prefix) :] - return matrix + return matrix.removeprefix("matrix://") return CommandFactory( generator_matrix_constants=Mock(spec=GeneratorMatrixConstants),