diff --git a/antarest/core/cache/business/local_chache.py b/antarest/core/cache/business/local_chache.py
index ac2a026db5..e903ff9080 100644
--- a/antarest/core/cache/business/local_chache.py
+++ b/antarest/core/cache/business/local_chache.py
@@ -15,16 +15,15 @@
import time
from typing import Dict, List, Optional
-from pydantic import BaseModel
-
from antarest.core.config import CacheConfig
from antarest.core.interfaces.cache import ICache
from antarest.core.model import JSON
+from antarest.core.serialization import AntaresBaseModel
logger = logging.getLogger(__name__)
-class LocalCacheElement(BaseModel):
+class LocalCacheElement(AntaresBaseModel):
timeout: int
duration: int
data: JSON
diff --git a/antarest/core/cache/business/redis_cache.py b/antarest/core/cache/business/redis_cache.py
index 7793f280c7..11eb3fcffd 100644
--- a/antarest/core/cache/business/redis_cache.py
+++ b/antarest/core/cache/business/redis_cache.py
@@ -13,17 +13,16 @@
import logging
from typing import List, Optional
-from pydantic import BaseModel
from redis.client import Redis
from antarest.core.interfaces.cache import ICache
from antarest.core.model import JSON
-from antarest.core.serialization import from_json
+from antarest.core.serialization import AntaresBaseModel, from_json
logger = logging.getLogger(__name__)
-class RedisCacheElement(BaseModel):
+class RedisCacheElement(AntaresBaseModel):
duration: int
data: JSON
diff --git a/antarest/core/configdata/model.py b/antarest/core/configdata/model.py
index af599d8861..3a4512e44c 100644
--- a/antarest/core/configdata/model.py
+++ b/antarest/core/configdata/model.py
@@ -13,13 +13,13 @@
from enum import StrEnum
from typing import Any, Optional
-from pydantic import BaseModel
from sqlalchemy import Column, Integer, String # type: ignore
from antarest.core.persistence import Base
+from antarest.core.serialization import AntaresBaseModel
-class ConfigDataDTO(BaseModel):
+class ConfigDataDTO(AntaresBaseModel):
key: str
value: Optional[str]
diff --git a/antarest/core/core_blueprint.py b/antarest/core/core_blueprint.py
index 27f6591109..d344531699 100644
--- a/antarest/core/core_blueprint.py
+++ b/antarest/core/core_blueprint.py
@@ -13,14 +13,14 @@
from typing import Any
from fastapi import APIRouter
-from pydantic import BaseModel
from antarest.core.config import Config
+from antarest.core.serialization import AntaresBaseModel
from antarest.core.utils.web import APITag
from antarest.core.version_info import VersionInfoDTO, get_commit_id, get_dependencies
-class StatusDTO(BaseModel):
+class StatusDTO(AntaresBaseModel):
status: str
diff --git a/antarest/core/filetransfer/model.py b/antarest/core/filetransfer/model.py
index 72463e0bad..6ce194ed19 100644
--- a/antarest/core/filetransfer/model.py
+++ b/antarest/core/filetransfer/model.py
@@ -15,10 +15,10 @@
from http.client import HTTPException
from typing import Optional
-from pydantic import BaseModel
from sqlalchemy import Boolean, Column, DateTime, Integer, String # type: ignore
from antarest.core.persistence import Base
+from antarest.core.serialization import AntaresBaseModel
class FileDownloadNotFound(HTTPException):
@@ -37,7 +37,7 @@ def __init__(self) -> None:
)
-class FileDownloadDTO(BaseModel):
+class FileDownloadDTO(AntaresBaseModel):
id: str
name: str
filename: str
@@ -47,7 +47,7 @@ class FileDownloadDTO(BaseModel):
error_message: str = ""
-class FileDownloadTaskDTO(BaseModel):
+class FileDownloadTaskDTO(AntaresBaseModel):
file: FileDownloadDTO
task: str
diff --git a/antarest/core/interfaces/eventbus.py b/antarest/core/interfaces/eventbus.py
index 8ac715e459..b9f6b72b41 100644
--- a/antarest/core/interfaces/eventbus.py
+++ b/antarest/core/interfaces/eventbus.py
@@ -14,9 +14,8 @@
from enum import StrEnum
from typing import Any, Awaitable, Callable, List, Optional
-from pydantic import BaseModel
-
from antarest.core.model import PermissionInfo
+from antarest.core.serialization import AntaresBaseModel
class EventType(StrEnum):
@@ -56,7 +55,7 @@ class EventChannelDirectory:
STUDY_GENERATION = "GENERATION_TASK/"
-class Event(BaseModel):
+class Event(AntaresBaseModel):
type: EventType
payload: Any
permissions: PermissionInfo
diff --git a/antarest/core/jwt.py b/antarest/core/jwt.py
index b42cc3273b..aa8323abb8 100644
--- a/antarest/core/jwt.py
+++ b/antarest/core/jwt.py
@@ -12,13 +12,12 @@
from typing import List, Union
-from pydantic import BaseModel
-
from antarest.core.roles import RoleType
+from antarest.core.serialization import AntaresBaseModel
from antarest.login.model import ADMIN_ID, Group, Identity
-class JWTGroup(BaseModel):
+class JWTGroup(AntaresBaseModel):
"""
Sub JWT domain with groups data belongs to user
"""
@@ -28,7 +27,7 @@ class JWTGroup(BaseModel):
role: RoleType
-class JWTUser(BaseModel):
+class JWTUser(AntaresBaseModel):
"""
JWT domain with user data.
"""
diff --git a/antarest/core/model.py b/antarest/core/model.py
index b086745064..78aa7a1e82 100644
--- a/antarest/core/model.py
+++ b/antarest/core/model.py
@@ -13,7 +13,7 @@
import enum
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
-from pydantic import BaseModel
+from antarest.core.serialization import AntaresBaseModel
if TYPE_CHECKING:
# These dependencies are only used for type checking with mypy.
@@ -43,7 +43,7 @@ class StudyPermissionType(enum.StrEnum):
MANAGE_PERMISSIONS = "MANAGE_PERMISSIONS"
-class PermissionInfo(BaseModel):
+class PermissionInfo(AntaresBaseModel):
owner: Optional[int] = None
groups: List[str] = []
public_mode: PublicMode = PublicMode.NONE
diff --git a/antarest/core/serialization/__init__.py b/antarest/core/serialization/__init__.py
index 6368c02f1e..a8616e3eae 100644
--- a/antarest/core/serialization/__init__.py
+++ b/antarest/core/serialization/__init__.py
@@ -32,3 +32,18 @@ def to_json(data: t.Any, indent: t.Optional[int] = None) -> bytes:
def to_json_string(data: t.Any, indent: t.Optional[int] = None) -> str:
return to_json(data, indent=indent).decode("utf-8")
+
+
+class AntaresBaseModel(pydantic.BaseModel):
+ """
+    Due to the pydantic migration from v1 to v2, we can run into the following issue:
+
+    class A(BaseModel):
+        a: str
+
+    A(a=2) raises a ValidationError because an int is given instead of a str.
+
+    To avoid this issue, we created our own base model class that inherits from pydantic's BaseModel and allows such object creation by coercing numbers to strings.
+ """
+
+ model_config = pydantic.config.ConfigDict(coerce_numbers_to_str=True)
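
For reference, a minimal sketch of the coercion behaviour this new base class provides (pydantic v2 only; the model name A mirrors the docstring above):

import pydantic

class AntaresBaseModel(pydantic.BaseModel):
    # Numbers passed for str fields are coerced instead of raising a ValidationError.
    model_config = pydantic.config.ConfigDict(coerce_numbers_to_str=True)

class A(AntaresBaseModel):
    a: str

print(A(a=2).a)  # "2" -- a plain pydantic.BaseModel would raise a ValidationError here
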
diff --git a/antarest/core/tasks/model.py b/antarest/core/tasks/model.py
index 7c73506604..6fe1f1a29b 100644
--- a/antarest/core/tasks/model.py
+++ b/antarest/core/tasks/model.py
@@ -15,12 +15,12 @@
from datetime import datetime
from enum import Enum, StrEnum
-from pydantic import BaseModel
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, Sequence, String # type: ignore
from sqlalchemy.engine.base import Engine # type: ignore
from sqlalchemy.orm import relationship, sessionmaker # type: ignore
from antarest.core.persistence import Base
+from antarest.core.serialization import AntaresBaseModel
if t.TYPE_CHECKING:
# avoid circular import
@@ -57,30 +57,30 @@ def is_final(self) -> bool:
]
-class TaskResult(BaseModel, extra="forbid"):
+class TaskResult(AntaresBaseModel, extra="forbid"):
success: bool
message: str
# Can be used to store json serialized result
return_value: t.Optional[str] = None
-class TaskLogDTO(BaseModel, extra="forbid"):
+class TaskLogDTO(AntaresBaseModel, extra="forbid"):
id: str
message: str
-class CustomTaskEventMessages(BaseModel, extra="forbid"):
+class CustomTaskEventMessages(AntaresBaseModel, extra="forbid"):
start: str
running: str
end: str
-class TaskEventPayload(BaseModel, extra="forbid"):
+class TaskEventPayload(AntaresBaseModel, extra="forbid"):
id: str
message: str
-class TaskDTO(BaseModel, extra="forbid"):
+class TaskDTO(AntaresBaseModel, extra="forbid"):
id: str
name: str
owner: t.Optional[int] = None
@@ -93,7 +93,7 @@ class TaskDTO(BaseModel, extra="forbid"):
ref_id: t.Optional[str] = None
-class TaskListFilter(BaseModel, extra="forbid"):
+class TaskListFilter(AntaresBaseModel, extra="forbid"):
status: t.List[TaskStatus] = []
name: t.Optional[str] = None
type: t.List[TaskType] = []
diff --git a/antarest/core/utils/__init__.py b/antarest/core/utils/__init__.py
index 058c6b221a..d1e93fb6a8 100644
--- a/antarest/core/utils/__init__.py
+++ b/antarest/core/utils/__init__.py
@@ -9,3 +9,5 @@
# SPDX-License-Identifier: MPL-2.0
#
# This file is part of the Antares project.
+
+__all__ = ["AntaresBaseModel"]
diff --git a/antarest/core/utils/utils.py b/antarest/core/utils/utils.py
index 89002edcfe..63576fc1a4 100644
--- a/antarest/core/utils/utils.py
+++ b/antarest/core/utils/utils.py
@@ -24,10 +24,8 @@
from pathlib import Path
import py7zr
-import redis
from fastapi import HTTPException
-from antarest.core.config import RedisConfig
from antarest.core.exceptions import ShouldNotHappenException
logger = logging.getLogger(__name__)
@@ -131,17 +129,6 @@ def get_local_path() -> Path:
return filepath
-def new_redis_instance(config: RedisConfig) -> redis.Redis: # type: ignore
- redis_client = redis.Redis(
- host=config.host,
- port=config.port,
- password=config.password,
- db=0,
- retry_on_error=[redis.ConnectionError, redis.TimeoutError], # type: ignore
- )
- return redis_client # type: ignore
-
-
class StopWatch:
def __init__(self) -> None:
self.current_time: float = time.time()
diff --git a/antarest/core/version_info.py b/antarest/core/version_info.py
index ea96eaf4ef..ec20a767c9 100644
--- a/antarest/core/version_info.py
+++ b/antarest/core/version_info.py
@@ -18,10 +18,10 @@
from pathlib import Path
from typing import Dict
-from pydantic import BaseModel
+from antarest.core.serialization import AntaresBaseModel
-class VersionInfoDTO(BaseModel):
+class VersionInfoDTO(AntaresBaseModel):
name: str = "AntaREST"
version: str
gitcommit: str
diff --git a/antarest/eventbus/web.py b/antarest/eventbus/web.py
index d997e1251d..d2d9405235 100644
--- a/antarest/eventbus/web.py
+++ b/antarest/eventbus/web.py
@@ -17,7 +17,6 @@
from typing import List, Optional
from fastapi import Depends, HTTPException, Query
-from pydantic import BaseModel
from starlette.websockets import WebSocket, WebSocketDisconnect
from antarest.core.application import AppBuildContext
@@ -26,7 +25,7 @@
from antarest.core.jwt import DEFAULT_ADMIN_USER, JWTUser
from antarest.core.model import PermissionInfo, StudyPermissionType
from antarest.core.permissions import check_permission
-from antarest.core.serialization import to_json_string
+from antarest.core.serialization import AntaresBaseModel, to_json_string
from antarest.fastapi_jwt_auth import AuthJWT
from antarest.login.auth import Auth
@@ -38,7 +37,7 @@ class WebsocketMessageAction(StrEnum):
UNSUBSCRIBE = "UNSUBSCRIBE"
-class WebsocketMessage(BaseModel):
+class WebsocketMessage(AntaresBaseModel):
action: WebsocketMessageAction
payload: str
diff --git a/antarest/front.py b/antarest/front.py
index 8de0f05e82..a0699812bf 100644
--- a/antarest/front.py
+++ b/antarest/front.py
@@ -25,13 +25,13 @@
from typing import Any, Optional, Sequence
from fastapi import FastAPI
-from pydantic import BaseModel
from starlette.middleware.base import BaseHTTPMiddleware, DispatchFunction, RequestResponseEndpoint
from starlette.requests import Request
from starlette.responses import FileResponse
from starlette.staticfiles import StaticFiles
from starlette.types import ASGIApp
+from antarest.core.serialization import AntaresBaseModel
from antarest.core.utils.string import to_camel_case
@@ -77,7 +77,7 @@ async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -
return await call_next(request)
-class BackEndConfig(BaseModel):
+class BackEndConfig(AntaresBaseModel):
"""
Configuration about backend URLs served to the frontend.
"""
diff --git a/antarest/launcher/adapters/abstractlauncher.py b/antarest/launcher/adapters/abstractlauncher.py
index e096eae2a2..ac36176300 100644
--- a/antarest/launcher/adapters/abstractlauncher.py
+++ b/antarest/launcher/adapters/abstractlauncher.py
@@ -15,6 +15,8 @@
from pathlib import Path
from typing import Callable, Dict, List, NamedTuple, Optional
+from antares.study.version import SolverVersion
+
from antarest.core.config import Config
from antarest.core.interfaces.cache import ICache
from antarest.core.interfaces.eventbus import Event, EventChannelDirectory, EventType, IEventBus
@@ -71,7 +73,7 @@ def run_study(
self,
study_uuid: str,
job_id: str,
- version: str,
+ version: SolverVersion,
launcher_parameters: LauncherParametersDTO,
params: RequestParameters,
) -> None:
diff --git a/antarest/launcher/adapters/local_launcher/local_launcher.py b/antarest/launcher/adapters/local_launcher/local_launcher.py
index 4db8beb5ae..a3ad8fc703 100644
--- a/antarest/launcher/adapters/local_launcher/local_launcher.py
+++ b/antarest/launcher/adapters/local_launcher/local_launcher.py
@@ -22,6 +22,8 @@
from typing import Callable, Dict, Optional, Tuple, cast
from uuid import UUID
+from antares.study.version import SolverVersion
+
from antarest.core.config import Config
from antarest.core.interfaces.cache import ICache
from antarest.core.interfaces.eventbus import IEventBus
@@ -75,11 +77,11 @@ def run_study(
self,
study_uuid: str,
job_id: str,
- version: str,
+ version: SolverVersion,
launcher_parameters: LauncherParametersDTO,
params: RequestParameters,
) -> None:
- antares_solver_path = self._select_best_binary(version)
+ antares_solver_path = self._select_best_binary(f"{version:ddd}")
job = threading.Thread(
target=LocalLauncher._compute,
diff --git a/antarest/launcher/adapters/log_parser.py b/antarest/launcher/adapters/log_parser.py
index efd73d1b70..16cadd74ea 100644
--- a/antarest/launcher/adapters/log_parser.py
+++ b/antarest/launcher/adapters/log_parser.py
@@ -14,7 +14,7 @@
import re
import typing as t
-from pydantic import BaseModel
+from antarest.core.serialization import AntaresBaseModel
_SearchFunc = t.Callable[[str], t.Optional[t.Match[str]]]
@@ -63,7 +63,7 @@
)
-class LaunchProgressDTO(BaseModel):
+class LaunchProgressDTO(AntaresBaseModel):
"""
Measure the progress of a study simulation.
diff --git a/antarest/launcher/adapters/slurm_launcher/slurm_launcher.py b/antarest/launcher/adapters/slurm_launcher/slurm_launcher.py
index 74132b89f0..577ee031b5 100644
--- a/antarest/launcher/adapters/slurm_launcher/slurm_launcher.py
+++ b/antarest/launcher/adapters/slurm_launcher/slurm_launcher.py
@@ -22,6 +22,7 @@
import typing as t
from pathlib import Path
+from antares.study.version import SolverVersion
from antareslauncher.data_repo.data_repo_tinydb import DataRepoTinydb
from antareslauncher.main import MainParameters, run_with
from antareslauncher.main_option_parser import MainOptionParser, ParserParameters
@@ -496,7 +497,7 @@ def _run_study(
study_uuid: str,
launch_uuid: str,
launcher_params: LauncherParametersDTO,
- version: str,
+ version: SolverVersion,
) -> None:
study_path = Path(self.launcher_args.studies_in) / launch_uuid
@@ -512,7 +513,7 @@ def _run_study(
append_log(launch_uuid, "Checking study version...")
available_versions = self.slurm_config.antares_versions_on_remote_server
- if version not in available_versions:
+ if f"{version:ddd}" not in available_versions:
raise VersionNotSupportedError(
f"Study version '{version}' is not supported. Currently supported versions are"
f" {', '.join(available_versions)}"
@@ -591,7 +592,7 @@ def run_study(
self,
study_uuid: str,
job_id: str,
- version: str,
+ version: SolverVersion,
launcher_parameters: LauncherParametersDTO,
params: RequestParameters,
) -> None:
@@ -644,11 +645,15 @@ def kill_job(self, job_id: str, dispatch: bool = True) -> None:
)
-def _override_solver_version(study_path: Path, version: str) -> None:
+def _override_solver_version(study_path: Path, version: SolverVersion) -> None:
study_info_path = study_path / "study.antares"
study_info = IniReader().read(study_info_path)
if "antares" in study_info:
- study_info["antares"]["solver_version"] = version
+ if version.major < 9: # should be written as XYZ
+ version_to_write = f"{version:ddd}"
+ else: # should be written as X.Y
+ version_to_write = f"{version:2d}"
+ study_info["antares"]["solver_version"] = version_to_write
IniWriter().write(study_info, study_info_path)
else:
logger.warning("Failed to find antares study info")
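
For reference, a hedged illustration of the two format specifications used in _override_solver_version above (assuming the antares-study-version format specs behave as this hunk relies on):

from antares.study.version import SolverVersion

legacy = SolverVersion.parse("8.8")
modern = SolverVersion.parse("9.1")
print(f"{legacy:ddd}")  # "880" -- compact three-digit form written for pre-v9 studies
print(f"{modern:2d}")   # "9.1" -- dotted major.minor form written for v9+ studies
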
diff --git a/antarest/launcher/model.py b/antarest/launcher/model.py
index 5096228635..dbd90e3ef4 100644
--- a/antarest/launcher/model.py
+++ b/antarest/launcher/model.py
@@ -14,23 +14,23 @@
import typing as t
from datetime import datetime
-from pydantic import BaseModel, Field
+from pydantic import Field
from sqlalchemy import Column, DateTime, Enum, ForeignKey, Integer, Sequence, String # type: ignore
from sqlalchemy.orm import relationship # type: ignore
from antarest.core.persistence import Base
-from antarest.core.serialization import from_json
+from antarest.core.serialization import AntaresBaseModel, from_json
from antarest.login.model import Identity, UserInfo
from antarest.study.business.all_optional_meta import camel_case_model
-class XpansionParametersDTO(BaseModel):
+class XpansionParametersDTO(AntaresBaseModel):
output_id: t.Optional[str] = None
sensitivity_mode: bool = False
enabled: bool = True
-class LauncherParametersDTO(BaseModel):
+class LauncherParametersDTO(AntaresBaseModel):
# Warning ! This class must be retro-compatible (that's the reason for the weird bool/XpansionParametersDTO union)
# The reason is that it's stored in json format in database and deserialized using the latest class version
# If compatibility is to be broken, an (alembic) data migration script should be added
@@ -91,7 +91,7 @@ class JobLogType(enum.StrEnum):
AFTER = "AFTER"
-class JobResultDTO(BaseModel):
+class JobResultDTO(AntaresBaseModel):
"""
A data transfer object (DTO) representing the job result.
@@ -232,16 +232,16 @@ def __repr__(self) -> str:
)
-class JobCreationDTO(BaseModel):
+class JobCreationDTO(AntaresBaseModel):
job_id: str
-class LauncherEnginesDTO(BaseModel):
+class LauncherEnginesDTO(AntaresBaseModel):
engines: t.List[str]
@camel_case_model
-class LauncherLoadDTO(BaseModel, extra="forbid", validate_assignment=True, populate_by_name=True):
+class LauncherLoadDTO(AntaresBaseModel, extra="forbid", validate_assignment=True, populate_by_name=True):
"""
DTO representing the load of the SLURM cluster or local machine.
diff --git a/antarest/launcher/service.py b/antarest/launcher/service.py
index 969789dee8..d9bb58c532 100644
--- a/antarest/launcher/service.py
+++ b/antarest/launcher/service.py
@@ -20,6 +20,7 @@
from typing import Dict, List, Optional, cast
from uuid import UUID, uuid4
+from antares.study.version import SolverVersion
from fastapi import HTTPException
from antarest.core.config import Config, Launcher, NbCoresConfig
@@ -228,7 +229,7 @@ def run_study(
job_uuid = self._generate_new_id()
logger.info(f"New study launch (study={study_uuid}, job_id={job_uuid})")
study_info = self.study_service.get_study_information(uuid=study_uuid, params=params)
- solver_version = study_version or study_info.version
+ solver_version = SolverVersion.parse(study_version or study_info.version)
self._assert_launcher_is_initialized(launcher)
assert_permission(
@@ -252,7 +253,7 @@ def run_study(
self.launchers[launcher].run_study(
study_uuid,
job_uuid,
- str(solver_version),
+ solver_version,
launcher_parameters,
params,
)
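
Downstream, run_study now receives the SolverVersion object itself rather than its string form; a small sketch of the parsing assumption this relies on (namely that SolverVersion.parse accepts both the compact and the dotted spellings stored for studies):

from antares.study.version import SolverVersion

compact = SolverVersion.parse("880")  # form typically stored in the database
dotted = SolverVersion.parse("8.8")   # form used in newer metadata
print(compact == dotted)              # expected True under this assumption
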
diff --git a/antarest/launcher/ssh_config.py b/antarest/launcher/ssh_config.py
index 5238e07608..7d4524d04d 100644
--- a/antarest/launcher/ssh_config.py
+++ b/antarest/launcher/ssh_config.py
@@ -14,10 +14,12 @@
from typing import Any, Dict, Optional
import paramiko
-from pydantic import BaseModel, model_validator
+from pydantic import model_validator
+from antarest.core.serialization import AntaresBaseModel
-class SSHConfigDTO(BaseModel):
+
+class SSHConfigDTO(AntaresBaseModel):
config_path: pathlib.Path
username: str
hostname: str
diff --git a/antarest/login/auth.py b/antarest/login/auth.py
index f86ca5903b..e0227a51f5 100644
--- a/antarest/login/auth.py
+++ b/antarest/login/auth.py
@@ -15,13 +15,12 @@
from typing import Any, Callable, Coroutine, Dict, Optional, Tuple, Union
from fastapi import Depends
-from pydantic import BaseModel
from ratelimit.types import Scope # type: ignore
from starlette.requests import Request
from antarest.core.config import Config
from antarest.core.jwt import DEFAULT_ADMIN_USER, JWTUser
-from antarest.core.serialization import from_json
+from antarest.core.serialization import AntaresBaseModel, from_json
from antarest.fastapi_jwt_auth import AuthJWT
logger = logging.getLogger(__name__)
@@ -79,7 +78,7 @@ def get_user_from_token(token: str, jwt_manager: AuthJWT) -> Optional[JWTUser]:
return None
-class JwtSettings(BaseModel):
+class JwtSettings(AntaresBaseModel):
authjwt_secret_key: str
authjwt_token_location: Tuple[str, ...]
authjwt_access_token_expires: Union[int, timedelta] = Auth.ACCESS_TOKEN_DURATION
diff --git a/antarest/login/model.py b/antarest/login/model.py
index 4f85763c9b..11a3bef802 100644
--- a/antarest/login/model.py
+++ b/antarest/login/model.py
@@ -15,7 +15,6 @@
import uuid
import bcrypt
-from pydantic.main import BaseModel
from sqlalchemy import Boolean, Column, Enum, ForeignKey, Integer, Sequence, String # type: ignore
from sqlalchemy.engine.base import Engine # type: ignore
from sqlalchemy.exc import IntegrityError # type: ignore
@@ -24,6 +23,7 @@
from antarest.core.persistence import Base
from antarest.core.roles import RoleType
+from antarest.core.serialization import AntaresBaseModel
if t.TYPE_CHECKING:
# avoid circular import
@@ -44,58 +44,58 @@
"""Name of the site administrator."""
-class UserInfo(BaseModel):
+class UserInfo(AntaresBaseModel):
id: int
name: str
-class BotRoleCreateDTO(BaseModel):
+class BotRoleCreateDTO(AntaresBaseModel):
group: str
role: int
-class BotCreateDTO(BaseModel):
+class BotCreateDTO(AntaresBaseModel):
name: str
roles: t.List[BotRoleCreateDTO]
is_author: bool = True
-class UserCreateDTO(BaseModel):
+class UserCreateDTO(AntaresBaseModel):
name: str
password: str
-class GroupDTO(BaseModel):
+class GroupDTO(AntaresBaseModel):
id: t.Optional[str] = None
name: str
-class RoleCreationDTO(BaseModel):
+class RoleCreationDTO(AntaresBaseModel):
type: RoleType
group_id: str
identity_id: int
-class RoleDTO(BaseModel):
+class RoleDTO(AntaresBaseModel):
group_id: t.Optional[str]
group_name: str
identity_id: int
type: RoleType
-class IdentityDTO(BaseModel):
+class IdentityDTO(AntaresBaseModel):
id: int
name: str
roles: t.List[RoleDTO]
-class RoleDetailDTO(BaseModel):
+class RoleDetailDTO(AntaresBaseModel):
group: GroupDTO
identity: UserInfo
type: RoleType
-class BotIdentityDTO(BaseModel):
+class BotIdentityDTO(AntaresBaseModel):
id: int
name: str
isAuthor: bool
@@ -107,7 +107,7 @@ class BotDTO(UserInfo):
is_author: bool
-class UserRoleDTO(BaseModel):
+class UserRoleDTO(AntaresBaseModel):
id: int
name: str
role: RoleType
@@ -311,7 +311,7 @@ def to_dto(self) -> RoleDetailDTO:
)
-class CredentialsDTO(BaseModel):
+class CredentialsDTO(AntaresBaseModel):
user: int
access_token: str
refresh_token: str
diff --git a/antarest/login/web.py b/antarest/login/web.py
index 5bc85c62a1..6f3968d6a9 100644
--- a/antarest/login/web.py
+++ b/antarest/login/web.py
@@ -16,13 +16,12 @@
from fastapi import APIRouter, Depends, HTTPException
from markupsafe import escape
-from pydantic import BaseModel
from antarest.core.config import Config
from antarest.core.jwt import JWTGroup, JWTUser
from antarest.core.requests import RequestParameters, UserHasNotPermissionError
from antarest.core.roles import RoleType
-from antarest.core.serialization import from_json
+from antarest.core.serialization import AntaresBaseModel, from_json
from antarest.core.utils.web import APITag
from antarest.fastapi_jwt_auth import AuthJWT
from antarest.login.auth import Auth
@@ -46,7 +45,7 @@
logger = logging.getLogger(__name__)
-class UserCredentials(BaseModel):
+class UserCredentials(AntaresBaseModel):
username: str
password: str
diff --git a/antarest/matrixstore/matrix_editor.py b/antarest/matrixstore/matrix_editor.py
index 838af83860..89bb866336 100644
--- a/antarest/matrixstore/matrix_editor.py
+++ b/antarest/matrixstore/matrix_editor.py
@@ -14,10 +14,12 @@
import operator
from typing import Any, Dict, List, Optional, Tuple
-from pydantic import BaseModel, Field, field_validator, model_validator
+from pydantic import Field, field_validator, model_validator
+from antarest.core.serialization import AntaresBaseModel
-class MatrixSlice(BaseModel):
+
+class MatrixSlice(AntaresBaseModel):
# NOTE: This Markdown documentation is reflected in the Swagger API
"""
Represents a group of cells in a matrix for updating.
@@ -97,7 +99,7 @@ def check_values(cls, values: Dict[str, Any]) -> Dict[str, Any]:
@functools.total_ordering
-class Operation(BaseModel):
+class Operation(AntaresBaseModel):
# NOTE: This Markdown documentation is reflected in the Swagger API
"""
Represents an update operation to be performed on matrix cells.
@@ -140,7 +142,7 @@ def __le__(self, other: Any) -> bool:
return NotImplemented # pragma: no cover
-class MatrixEditInstruction(BaseModel):
+class MatrixEditInstruction(AntaresBaseModel):
# NOTE: This Markdown documentation is reflected in the Swagger API
"""
Provides edit instructions to be applied to a matrix.
diff --git a/antarest/matrixstore/model.py b/antarest/matrixstore/model.py
index 244cafadca..2dccd350ab 100644
--- a/antarest/matrixstore/model.py
+++ b/antarest/matrixstore/model.py
@@ -14,11 +14,11 @@
import typing as t
import uuid
-from pydantic import BaseModel
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, Table # type: ignore
from sqlalchemy.orm import relationship # type: ignore
from antarest.core.persistence import Base
+from antarest.core.serialization import AntaresBaseModel
from antarest.login.model import GroupDTO, Identity, UserInfo
@@ -58,12 +58,12 @@ def __eq__(self, other: t.Any) -> bool:
return res
-class MatrixInfoDTO(BaseModel):
+class MatrixInfoDTO(AntaresBaseModel):
id: str
name: str
-class MatrixDataSetDTO(BaseModel):
+class MatrixDataSetDTO(AntaresBaseModel):
id: str
name: str
matrices: t.List[MatrixInfoDTO]
@@ -209,7 +209,7 @@ def __eq__(self, other: t.Any) -> bool:
MatrixData = float
-class MatrixDTO(BaseModel):
+class MatrixDTO(AntaresBaseModel):
width: int
height: int
index: t.List[str]
@@ -219,7 +219,7 @@ class MatrixDTO(BaseModel):
id: str = ""
-class MatrixContent(BaseModel):
+class MatrixContent(AntaresBaseModel):
"""
Matrix content (Data Frame array)
@@ -234,7 +234,7 @@ class MatrixContent(BaseModel):
columns: t.List[t.Union[int, str]]
-class MatrixDataSetUpdateDTO(BaseModel):
+class MatrixDataSetUpdateDTO(AntaresBaseModel):
name: str
groups: t.List[str]
public: bool
diff --git a/antarest/service_creator.py b/antarest/service_creator.py
index 04d9da626a..bb2b48a9d9 100644
--- a/antarest/service_creator.py
+++ b/antarest/service_creator.py
@@ -25,7 +25,7 @@
from antarest.core.application import AppBuildContext
from antarest.core.cache.main import build_cache
-from antarest.core.config import Config
+from antarest.core.config import Config, RedisConfig
from antarest.core.filetransfer.main import build_filetransfer_service
from antarest.core.filetransfer.service import FileTransferManager
from antarest.core.interfaces.cache import ICache
@@ -34,7 +34,6 @@
from antarest.core.persistence import upgrade_db
from antarest.core.tasks.main import build_taskjob_manager
from antarest.core.tasks.service import ITaskService
-from antarest.core.utils.utils import new_redis_instance
from antarest.eventbus.main import build_eventbus
from antarest.launcher.main import build_launcher
from antarest.login.main import build_login
@@ -109,6 +108,17 @@ def init_db_engine(
return engine
+def new_redis_instance(config: RedisConfig) -> redis.Redis: # type: ignore
+ redis_client = redis.Redis(
+ host=config.host,
+ port=config.port,
+ password=config.password,
+ db=0,
+ retry_on_error=[redis.ConnectionError, redis.TimeoutError], # type: ignore
+ )
+ return redis_client # type: ignore
+
+
def create_event_bus(app_ctxt: t.Optional[AppBuildContext], config: Config) -> t.Tuple[IEventBus, t.Optional[redis.Redis]]: # type: ignore
redis_client = new_redis_instance(config.redis) if config.redis is not None else None
return (
diff --git a/antarest/study/business/adequacy_patch_management.py b/antarest/study/business/adequacy_patch_management.py
index ddc2214891..26dc1ec548 100644
--- a/antarest/study/business/adequacy_patch_management.py
+++ b/antarest/study/business/adequacy_patch_management.py
@@ -17,7 +17,7 @@
from antarest.study.business.all_optional_meta import all_optional_model
from antarest.study.business.enum_ignore_case import EnumIgnoreCase
from antarest.study.business.utils import GENERAL_DATA_PATH, FieldInfo, FormFieldsBaseModel, execute_or_add_commands
-from antarest.study.model import Study
+from antarest.study.model import STUDY_VERSION_8_3, STUDY_VERSION_8_5, Study
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig
@@ -52,47 +52,47 @@ class AdequacyPatchFormFields(FormFieldsBaseModel):
"enable_adequacy_patch": {
"path": f"{ADEQUACY_PATCH_PATH}/include-adq-patch",
"default_value": False,
- "start_version": 830,
+ "start_version": STUDY_VERSION_8_3,
},
"ntc_from_physical_areas_out_to_physical_areas_in_adequacy_patch": {
"path": f"{ADEQUACY_PATCH_PATH}/set-to-null-ntc-from-physical-out-to-physical-in-for-first-step",
"default_value": True,
- "start_version": 830,
+ "start_version": STUDY_VERSION_8_3,
},
"ntc_between_physical_areas_out_adequacy_patch": {
"path": f"{ADEQUACY_PATCH_PATH}/set-to-null-ntc-between-physical-out-for-first-step",
"default_value": True,
- "start_version": 830,
+ "start_version": STUDY_VERSION_8_3,
},
"price_taking_order": {
"path": f"{ADEQUACY_PATCH_PATH}/price-taking-order",
"default_value": PriceTakingOrder.DENS.value,
- "start_version": 850,
+ "start_version": STUDY_VERSION_8_5,
},
"include_hurdle_cost_csr": {
"path": f"{ADEQUACY_PATCH_PATH}/include-hurdle-cost-csr",
"default_value": False,
- "start_version": 850,
+ "start_version": STUDY_VERSION_8_5,
},
"check_csr_cost_function": {
"path": f"{ADEQUACY_PATCH_PATH}/check-csr-cost-function",
"default_value": False,
- "start_version": 850,
+ "start_version": STUDY_VERSION_8_5,
},
"threshold_initiate_curtailment_sharing_rule": {
"path": f"{ADEQUACY_PATCH_PATH}/threshold-initiate-curtailment-sharing-rule",
"default_value": 0.0,
- "start_version": 850,
+ "start_version": STUDY_VERSION_8_5,
},
"threshold_display_local_matching_rule_violations": {
"path": f"{ADEQUACY_PATCH_PATH}/threshold-display-local-matching-rule-violations",
"default_value": 0.0,
- "start_version": 850,
+ "start_version": STUDY_VERSION_8_5,
},
"threshold_csr_variable_bounds_relaxation": {
"path": f"{ADEQUACY_PATCH_PATH}/threshold-csr-variable-bounds-relaxation",
"default_value": 3,
- "start_version": 850,
+ "start_version": STUDY_VERSION_8_5,
},
}
@@ -113,7 +113,7 @@ def get_value(field_info: FieldInfo) -> Any:
path = field_info["path"]
start_version = field_info.get("start_version", -1)
target_name = path.split("/")[-1]
- is_in_version = file_study.config.version >= start_version # type: ignore
+ is_in_version = file_study.config.version >= start_version
return parent.get(target_name, field_info["default_value"]) if is_in_version else None
diff --git a/antarest/study/business/advanced_parameters_management.py b/antarest/study/business/advanced_parameters_management.py
index 3e68ff0aa1..87f6939a91 100644
--- a/antarest/study/business/advanced_parameters_management.py
+++ b/antarest/study/business/advanced_parameters_management.py
@@ -12,6 +12,7 @@
from typing import Any, Dict, List
+from antares.study.version import StudyVersion
from pydantic import field_validator
from pydantic.types import StrictInt, StrictStr
@@ -19,7 +20,7 @@
from antarest.study.business.all_optional_meta import all_optional_model
from antarest.study.business.enum_ignore_case import EnumIgnoreCase
from antarest.study.business.utils import GENERAL_DATA_PATH, FieldInfo, FormFieldsBaseModel, execute_or_add_commands
-from antarest.study.model import Study
+from antarest.study.model import STUDY_VERSION_8_8, Study
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig
@@ -255,7 +256,7 @@ def set_field_values(self, study: Study, field_values: AdvancedParamsFormFields)
if (
field_name == "unit_commitment_mode"
and value == UnitCommitmentMode.MILP
- and int(study.version) < 880
+ and StudyVersion.parse(study.version) < STUDY_VERSION_8_8
):
raise InvalidFieldForVersionError("Unit commitment mode `MILP` only exists in v8.8+ studies")
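
The MILP guard above illustrates the general migration pattern: parse the stored version once and compare it against a named constant instead of a magic integer. A minimal sketch, using the STUDY_VERSION_8_8 constant imported from antarest.study.model as in this hunk:

from antares.study.version import StudyVersion
from antarest.study.model import STUDY_VERSION_8_8

def milp_allowed(raw_version: str) -> bool:
    # e.g. "8.7" parses below 8.8, so the MILP unit-commitment mode stays rejected for it
    return StudyVersion.parse(raw_version) >= STUDY_VERSION_8_8
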
diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py
index e49eb6a9d6..d90ec4c554 100644
--- a/antarest/study/business/area_management.py
+++ b/antarest/study/business/area_management.py
@@ -15,10 +15,11 @@
import re
import typing as t
-from pydantic import BaseModel, Field
+from pydantic import Field
from antarest.core.exceptions import ConfigFileNotFound, DuplicateAreaName, LayerNotAllowedToBeDeleted, LayerNotFound
from antarest.core.model import JSON
+from antarest.core.serialization import AntaresBaseModel
from antarest.study.business.all_optional_meta import all_optional_model, camel_case_model
from antarest.study.business.utils import execute_or_add_commands
from antarest.study.model import Patch, PatchArea, PatchCluster, RawStudy, Study
@@ -47,7 +48,7 @@ class AreaType(enum.Enum):
DISTRICT = "DISTRICT"
-class AreaCreationDTO(BaseModel):
+class AreaCreationDTO(AntaresBaseModel):
name: str
type: AreaType
metadata: t.Optional[PatchArea] = None
@@ -62,7 +63,7 @@ class ClusterInfoDTO(PatchCluster):
unitcount: int = 0
nominalcapacity: float = 0
group: t.Optional[str] = None
- min_stable_power: t.Optional[int] = None
+ min_stable_power: t.Optional[float] = None
min_up_time: t.Optional[int] = None
min_down_time: t.Optional[int] = None
spinning: t.Optional[float] = None
@@ -76,13 +77,13 @@ class AreaInfoDTO(AreaCreationDTO):
thermals: t.Optional[t.List[ClusterInfoDTO]] = None
-class LayerInfoDTO(BaseModel):
+class LayerInfoDTO(AntaresBaseModel):
id: str
name: str
areas: t.List[str]
-class UpdateAreaUi(BaseModel, extra="forbid", populate_by_name=True):
+class UpdateAreaUi(AntaresBaseModel, extra="forbid", populate_by_name=True):
"""
DTO for updating area UI
diff --git a/antarest/study/business/areas/properties_management.py b/antarest/study/business/areas/properties_management.py
index ac3dbb902a..ae11112a28 100644
--- a/antarest/study/business/areas/properties_management.py
+++ b/antarest/study/business/areas/properties_management.py
@@ -19,7 +19,7 @@
from antarest.core.exceptions import ChildNotFoundError
from antarest.study.business.all_optional_meta import all_optional_model
from antarest.study.business.utils import FieldInfo, FormFieldsBaseModel, execute_or_add_commands
-from antarest.study.model import Study
+from antarest.study.model import STUDY_VERSION_8_3, Study
from antarest.study.storage.rawstudy.model.filesystem.config.area import AdequacyPatchMode
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig
@@ -113,7 +113,7 @@ def validation(cls, values: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
"adequacy_patch_mode": {
"path": f"{AREA_PATH}/adequacy_patch/adequacy-patch/adequacy-patch-mode",
"default_value": AdequacyPatchMode.OUTSIDE.value,
- "start_version": 830,
+ "start_version": STUDY_VERSION_8_3,
},
}
diff --git a/antarest/study/business/areas/renewable_management.py b/antarest/study/business/areas/renewable_management.py
index dbe844a056..b093aa325f 100644
--- a/antarest/study/business/areas/renewable_management.py
+++ b/antarest/study/business/areas/renewable_management.py
@@ -13,6 +13,7 @@
import collections
import typing as t
+from antares.study.version import StudyVersion
from pydantic import field_validator
from antarest.core.exceptions import DuplicateRenewableCluster, RenewableClusterConfigNotFound, RenewableClusterNotFound
@@ -82,7 +83,7 @@ def validate_name(cls, name: t.Optional[str]) -> str:
raise ValueError("name must not be empty")
return name
- def to_config(self, study_version: t.Union[str, int]) -> RenewableConfigType:
+ def to_config(self, study_version: StudyVersion) -> RenewableConfigType:
values = self.model_dump(by_alias=False, exclude_none=True)
return create_renewable_config(study_version=study_version, **values)
@@ -109,11 +110,11 @@ def json_schema_extra(schema: t.MutableMapping[str, t.Any]) -> None:
def create_renewable_output(
- study_version: t.Union[str, int],
+ study_version: str,
cluster_id: str,
config: t.Mapping[str, t.Any],
) -> "RenewableClusterOutput":
- obj = create_renewable_config(study_version=study_version, **config, id=cluster_id)
+ obj = create_renewable_config(study_version=StudyVersion.parse(study_version), **config, id=cluster_id)
kwargs = obj.model_dump(by_alias=False)
return RenewableClusterOutput(**kwargs)
@@ -182,12 +183,11 @@ def get_all_renewables_props(
except KeyError:
raise RenewableClusterConfigNotFound(path)
- study_version = study.version
renewables_by_areas: t.MutableMapping[str, t.MutableMapping[str, RenewableClusterOutput]]
renewables_by_areas = collections.defaultdict(dict)
for area_id, cluster_obj in clusters.items():
for cluster_id, cluster in cluster_obj.items():
- renewables_by_areas[area_id][cluster_id] = create_renewable_output(study_version, cluster_id, cluster)
+ renewables_by_areas[area_id][cluster_id] = create_renewable_output(study.version, cluster_id, cluster)
return renewables_by_areas
@@ -206,7 +206,7 @@ def create_cluster(
The newly created cluster.
"""
file_study = self._get_file_study(study)
- cluster = cluster_data.to_config(study.version)
+ cluster = cluster_data.to_config(StudyVersion.parse(study.version))
command = self._make_create_cluster_cmd(area_id, cluster)
execute_or_add_commands(
study,
@@ -272,7 +272,7 @@ def update_cluster(
RenewableClusterNotFound: If the cluster to update is not found.
"""
- study_version = study.version
+ study_version = StudyVersion.parse(study.version)
file_study = self._get_file_study(study)
path = _CLUSTER_PATH.format(area_id=area_id, cluster_id=cluster_id)
@@ -356,7 +356,7 @@ def duplicate_cluster(
current_cluster = self.get_cluster(study, area_id, source_id)
current_cluster.name = new_cluster_name
creation_form = RenewableClusterCreation(**current_cluster.model_dump(by_alias=False, exclude={"id"}))
- new_config = creation_form.to_config(study.version)
+ new_config = creation_form.to_config(StudyVersion.parse(study.version))
create_cluster_cmd = self._make_create_cluster_cmd(area_id, new_config)
# Matrix edition
@@ -395,7 +395,7 @@ def update_renewables_props(
# Convert the DTO to a configuration object and update the configuration file.
properties = create_renewable_config(
- study.version, **new_cluster.model_dump(by_alias=False, exclude_none=True)
+ StudyVersion.parse(study.version), **new_cluster.model_dump(by_alias=False, exclude_none=True)
)
path = _CLUSTER_PATH.format(area_id=area_id, cluster_id=renewable_id)
cmd = UpdateConfig(
diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py
index 23f1052a8b..3cd4641410 100644
--- a/antarest/study/business/areas/st_storage_management.py
+++ b/antarest/study/business/areas/st_storage_management.py
@@ -15,7 +15,8 @@
import typing as t
import numpy as np
-from pydantic import BaseModel, field_validator, model_validator
+from antares.study.version import StudyVersion
+from pydantic import field_validator, model_validator
from typing_extensions import Literal
from antarest.core.exceptions import (
@@ -28,9 +29,10 @@
)
from antarest.core.model import JSON
from antarest.core.requests import CaseInsensitiveDict
+from antarest.core.serialization import AntaresBaseModel
from antarest.study.business.all_optional_meta import all_optional_model, camel_case_model
from antarest.study.business.utils import execute_or_add_commands
-from antarest.study.model import Study
+from antarest.study.model import STUDY_VERSION_8_8, Study
from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id
from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import (
STStorage880Config,
@@ -85,7 +87,7 @@ def validate_name(cls, name: t.Optional[str]) -> str:
return name
# noinspection PyUnusedLocal
- def to_config(self, study_version: t.Union[str, int]) -> STStorageConfigType:
+ def to_config(self, study_version: StudyVersion) -> STStorageConfigType:
values = self.model_dump(mode="json", by_alias=False, exclude_none=True)
return create_st_storage_config(study_version=study_version, **values)
@@ -117,7 +119,7 @@ def json_schema_extra(schema: t.MutableMapping[str, t.Any]) -> None:
# =============
-class STStorageMatrix(BaseModel):
+class STStorageMatrix(AntaresBaseModel):
"""
Short-Term Storage Matrix Model.
@@ -157,7 +159,7 @@ def validate_time_series(cls, data: t.List[t.List[float]]) -> t.List[t.List[floa
# noinspection SpellCheckingInspection
-class STStorageMatrices(BaseModel):
+class STStorageMatrices(AntaresBaseModel):
"""
Short-Term Storage Matrices Validation Model.
@@ -239,7 +241,7 @@ def _get_values_by_ids(file_study: FileStudy, area_id: str) -> t.Mapping[str, t.
def create_storage_output(
- study_version: t.Union[str, int],
+ study_version: StudyVersion,
cluster_id: str,
config: t.Mapping[str, t.Any],
) -> "STStorageOutput":
@@ -283,7 +285,7 @@ def create_storage(
file_study = self._get_file_study(study)
values_by_ids = _get_values_by_ids(file_study, area_id)
- storage = form.to_config(study.version)
+ storage = form.to_config(StudyVersion.parse(study.version))
values = values_by_ids.get(storage.id)
if values is not None:
raise DuplicateSTStorage(area_id, storage.id)
@@ -333,7 +335,7 @@ def get_storages(
# Sort STStorageConfig by groups and then by name
order_by = operator.attrgetter("group", "name")
- study_version = int(study.version)
+ study_version = StudyVersion.parse(study.version)
storages = [create_storage_output(study_version, storage_id, options) for storage_id, options in config.items()]
return sorted(storages, key=order_by)
@@ -364,7 +366,7 @@ def get_all_storages_props(
except KeyError:
raise STStorageConfigNotFound(path) from None
- study_version = study.version
+ study_version = StudyVersion.parse(study.version)
storages_by_areas: t.MutableMapping[str, t.MutableMapping[str, STStorageOutput]]
storages_by_areas = collections.defaultdict(dict)
for area_id, cluster_obj in storages.items():
@@ -395,7 +397,7 @@ def update_storages_props(
# Convert the DTO to a configuration object and update the configuration file.
properties = create_st_storage_config(
- study.version, **new_cluster.model_dump(mode="json", by_alias=False, exclude_none=True)
+ StudyVersion.parse(study.version), **new_cluster.model_dump(mode="json", by_alias=False, exclude_none=True)
)
path = _STORAGE_LIST_PATH.format(area_id=area_id, storage_id=storage_id)
cmd = UpdateConfig(
@@ -434,7 +436,7 @@ def get_storage(
config = file_study.tree.get(path.split("/"), depth=1)
except KeyError:
raise STStorageNotFound(path, storage_id) from None
- return create_storage_output(int(study.version), storage_id, config)
+ return create_storage_output(StudyVersion.parse(study.version), storage_id, config)
def update_storage(
self,
@@ -454,7 +456,7 @@ def update_storage(
Returns:
Updated form of short-term storage.
"""
- study_version = study.version
+ study_version = StudyVersion.parse(study.version)
# For variants, this method requires generating a snapshot, which takes time.
# But sadly, there's no other way to prevent creating wrong commands.
@@ -549,13 +551,14 @@ def duplicate_cluster(self, study: Study, area_id: str, source_id: str, new_clus
current_cluster.name = new_cluster_name
fields_to_exclude = {"id"}
# We should remove the field 'enabled' for studies before v8.8 as it didn't exist
- if int(study.version) < 880:
+ study_version = StudyVersion.parse(study.version)
+ if study_version < STUDY_VERSION_8_8:
fields_to_exclude.add("enabled")
creation_form = STStorageCreation(
**current_cluster.model_dump(mode="json", by_alias=False, exclude=fields_to_exclude)
)
- new_config = creation_form.to_config(study.version)
+ new_config = creation_form.to_config(study_version)
create_cluster_cmd = self._make_create_cluster_cmd(area_id, new_config)
# Matrix edition
diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py
index 74f6be680d..9775a71265 100644
--- a/antarest/study/business/areas/thermal_management.py
+++ b/antarest/study/business/areas/thermal_management.py
@@ -14,6 +14,7 @@
import typing as t
from pathlib import Path
+from antares.study.version import StudyVersion
from pydantic import field_validator
from antarest.core.exceptions import (
@@ -26,7 +27,7 @@
from antarest.core.model import JSON
from antarest.study.business.all_optional_meta import all_optional_model, camel_case_model
from antarest.study.business.utils import execute_or_add_commands
-from antarest.study.model import Study
+from antarest.study.model import STUDY_VERSION_8_7, Study
from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id
from antarest.study.storage.rawstudy.model.filesystem.config.thermal import (
Thermal870Config,
@@ -90,7 +91,7 @@ def validate_name(cls, name: t.Optional[str]) -> str:
raise ValueError("name must not be empty")
return name
- def to_config(self, study_version: t.Union[str, int]) -> ThermalConfigType:
+ def to_config(self, study_version: StudyVersion) -> ThermalConfigType:
values = self.model_dump(mode="json", by_alias=False, exclude_none=True)
return create_thermal_config(study_version=study_version, **values)
@@ -118,7 +119,7 @@ def json_schema_extra(schema: t.MutableMapping[str, t.Any]) -> None:
def create_thermal_output(
- study_version: t.Union[str, int],
+ study_version: StudyVersion,
cluster_id: str,
config: t.Mapping[str, t.Any],
) -> "ThermalClusterOutput":
@@ -172,7 +173,7 @@ def get_cluster(self, study: Study, area_id: str, cluster_id: str) -> ThermalClu
cluster = file_study.tree.get(path.split("/"), depth=1)
except KeyError:
raise ThermalClusterNotFound(path, cluster_id) from None
- study_version = study.version
+ study_version = StudyVersion.parse(study.version)
return create_thermal_output(study_version, cluster_id, cluster)
def get_clusters(
@@ -200,7 +201,7 @@ def get_clusters(
clusters = file_study.tree.get(path.split("/"), depth=3)
except KeyError:
raise ThermalClusterConfigNotFound(path, area_id) from None
- study_version = study.version
+ study_version = StudyVersion.parse(study.version)
return [create_thermal_output(study_version, cluster_id, cluster) for cluster_id, cluster in clusters.items()]
def get_all_thermals_props(
@@ -230,7 +231,7 @@ def get_all_thermals_props(
except KeyError:
raise ThermalClusterConfigNotFound(path) from None
- study_version = study.version
+ study_version = StudyVersion.parse(study.version)
thermals_by_areas: t.MutableMapping[str, t.MutableMapping[str, ThermalClusterOutput]]
thermals_by_areas = collections.defaultdict(dict)
for area_id, cluster_obj in clusters.items():
@@ -261,7 +262,7 @@ def update_thermals_props(
# Convert the DTO to a configuration object and update the configuration file.
properties = create_thermal_config(
- study.version, **new_cluster.model_dump(mode="json", by_alias=False, exclude_none=True)
+ StudyVersion.parse(study.version), **new_cluster.model_dump(mode="json", by_alias=False, exclude_none=True)
)
path = _CLUSTER_PATH.format(area_id=area_id, cluster_id=thermal_id)
cmd = UpdateConfig(
@@ -294,7 +295,7 @@ def create_cluster(self, study: Study, area_id: str, cluster_data: ThermalCluste
"""
file_study = self._get_file_study(study)
- cluster = cluster_data.to_config(study.version)
+ cluster = cluster_data.to_config(StudyVersion.parse(study.version))
command = self._make_create_cluster_cmd(area_id, cluster)
execute_or_add_commands(
study,
@@ -342,7 +343,7 @@ def update_cluster(
in the provided cluster_data.
"""
- study_version = study.version
+ study_version = StudyVersion.parse(study.version)
file_study = self._get_file_study(study)
path = _CLUSTER_PATH.format(area_id=area_id, cluster_id=cluster_id)
try:
@@ -426,7 +427,7 @@ def duplicate_cluster(
source_cluster = self.get_cluster(study, area_id, source_id)
source_cluster.name = new_cluster_name
creation_form = ThermalClusterCreation(**source_cluster.model_dump(mode="json", by_alias=False, exclude={"id"}))
- new_config = creation_form.to_config(study.version)
+ new_config = creation_form.to_config(StudyVersion.parse(study.version))
create_cluster_cmd = self._make_create_cluster_cmd(area_id, new_config)
# Matrix edition
@@ -441,7 +442,7 @@ def duplicate_cluster(
f"input/thermal/prepro/{area_id}/{lower_new_id}/modulation",
f"input/thermal/prepro/{area_id}/{lower_new_id}/data",
]
- if int(study.version) >= 870:
+ if StudyVersion.parse(study.version) >= STUDY_VERSION_8_7:
source_paths.append(f"input/thermal/series/{area_id}/{lower_source_id}/CO2Cost")
source_paths.append(f"input/thermal/series/{area_id}/{lower_source_id}/fuelCost")
new_paths.append(f"input/thermal/series/{area_id}/{lower_new_id}/CO2Cost")
@@ -464,7 +465,7 @@ def validate_series(self, study: Study, area_id: str, cluster_id: str) -> bool:
lower_cluster_id = cluster_id.lower()
thermal_cluster_path = Path(f"input/thermal/series/{area_id}/{lower_cluster_id}")
series_path = [thermal_cluster_path / "series"]
- if int(study.version) >= 870:
+ if StudyVersion.parse(study.version) >= STUDY_VERSION_8_7:
series_path.append(thermal_cluster_path / "CO2Cost")
series_path.append(thermal_cluster_path / "fuelCost")
diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py
index 4ef2a5fffc..985123003d 100644
--- a/antarest/study/business/binding_constraint_management.py
+++ b/antarest/study/business/binding_constraint_management.py
@@ -15,7 +15,8 @@
import typing as t
import numpy as np
-from pydantic import BaseModel, Field, field_validator, model_validator
+from antares.study.version import StudyVersion
+from pydantic import Field, field_validator, model_validator
from antarest.core.exceptions import (
BindingConstraintNotFound,
@@ -30,10 +31,11 @@
)
from antarest.core.model import JSON
from antarest.core.requests import CaseInsensitiveDict
+from antarest.core.serialization import AntaresBaseModel
from antarest.core.utils.string import to_camel_case
from antarest.study.business.all_optional_meta import camel_case_model
from antarest.study.business.utils import execute_or_add_commands
-from antarest.study.model import Study
+from antarest.study.model import STUDY_VERSION_8_3, STUDY_VERSION_8_7, Study
from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
DEFAULT_GROUP,
DEFAULT_OPERATOR,
@@ -79,7 +81,7 @@
}
-class LinkTerm(BaseModel):
+class LinkTerm(AntaresBaseModel):
"""
DTO for a constraint term on a link between two areas.
@@ -98,7 +100,7 @@ def generate_id(self) -> str:
return "%".join(ids)
-class ClusterTerm(BaseModel):
+class ClusterTerm(AntaresBaseModel):
"""
DTO for a constraint term on a cluster in an area.
@@ -117,7 +119,7 @@ def generate_id(self) -> str:
return ".".join(ids)
-class ConstraintTerm(BaseModel):
+class ConstraintTerm(AntaresBaseModel):
"""
DTO for a constraint term.
@@ -147,7 +149,7 @@ def generate_id(self) -> str:
return self.data.generate_id()
-class ConstraintFilters(BaseModel, frozen=True, extra="forbid"):
+class ConstraintFilters(AntaresBaseModel, frozen=True, extra="forbid"):
"""
Binding Constraint Filters gathering the main filtering parameters.
@@ -461,7 +463,7 @@ def parse_and_add_terms(key: str, value: t.Any, adapted_constraint: ConstraintOu
)
@staticmethod
- def constraint_model_adapter(constraint: t.Mapping[str, t.Any], version: int) -> ConstraintOutput:
+ def constraint_model_adapter(constraint: t.Mapping[str, t.Any], study_version: StudyVersion) -> ConstraintOutput:
"""
Adapts a binding constraint configuration to the appropriate model version.
@@ -469,7 +471,7 @@ def constraint_model_adapter(constraint: t.Mapping[str, t.Any], version: int) ->
constraint: A dictionary or model representing the constraint to be adapted.
This can either be a dictionary coming from client input or an existing
model that needs reformatting.
- version: An integer indicating the target version of the study configuration. This is used to
+ study_version: A StudyVersion object indicating the target version of the study configuration. This is used to
determine which model class to instantiate and which default values to apply.
Returns:
@@ -493,19 +495,19 @@ def constraint_model_adapter(constraint: t.Mapping[str, t.Any], version: int) ->
"terms": constraint.get("terms", []),
}
- if version >= 830:
+ if study_version >= STUDY_VERSION_8_3:
_filter_year_by_year = constraint.get("filter_year_by_year") or constraint.get("filter-year-by-year", "")
_filter_synthesis = constraint.get("filter_synthesis") or constraint.get("filter-synthesis", "")
constraint_output["filter_year_by_year"] = _filter_year_by_year
constraint_output["filter_synthesis"] = _filter_synthesis
- if version >= 870:
+ if study_version >= STUDY_VERSION_8_7:
constraint_output["group"] = constraint.get("group", DEFAULT_GROUP)
# Choose the right model according to the version
adapted_constraint: ConstraintOutput
- if version >= 870:
+ if study_version >= STUDY_VERSION_8_7:
adapted_constraint = ConstraintOutput870(**constraint_output)
- elif version >= 830:
+ elif study_version >= STUDY_VERSION_8_3:
adapted_constraint = ConstraintOutput830(**constraint_output)
else:
adapted_constraint = ConstraintOutputBase(**constraint_output)
@@ -556,7 +558,7 @@ def get_binding_constraint(self, study: Study, bc_id: str) -> ConstraintOutput:
constraints_by_id: t.Dict[str, ConstraintOutput] = CaseInsensitiveDict() # type: ignore
for constraint in config.values():
- constraint_config = self.constraint_model_adapter(constraint, int(study.version))
+ constraint_config = self.constraint_model_adapter(constraint, StudyVersion.parse(study.version))
constraints_by_id[constraint_config.id] = constraint_config
if bc_id not in constraints_by_id:
@@ -580,7 +582,7 @@ def get_binding_constraints(
storage_service = self.storage_service.get_storage(study)
file_study = storage_service.get_raw(study)
config = file_study.tree.get(["input", "bindingconstraints", "bindingconstraints"])
- outputs = [self.constraint_model_adapter(c, int(study.version)) for c in config.values()]
+ outputs = [self.constraint_model_adapter(c, StudyVersion.parse(study.version)) for c in config.values()]
filtered_constraints = list(filter(lambda c: filters.match_filters(c), outputs))
return filtered_constraints
@@ -607,7 +609,7 @@ def get_grouped_constraints(self, study: Study) -> t.Mapping[str, t.Sequence[Con
grouped_constraints = CaseInsensitiveDict()
for constraint in config.values():
- constraint_config = self.constraint_model_adapter(constraint, int(study.version))
+ constraint_config = self.constraint_model_adapter(constraint, StudyVersion.parse(study.version))
constraint_group = getattr(constraint_config, "group", DEFAULT_GROUP)
grouped_constraints.setdefault(constraint_group, []).append(constraint_config)
@@ -703,7 +705,7 @@ def create_binding_constraint(
data: ConstraintCreation,
) -> ConstraintOutput:
bc_id = transform_name_to_id(data.name)
- version = int(study.version)
+ version = StudyVersion.parse(study.version)
if not bc_id:
raise InvalidConstraintName(f"Invalid binding constraint name: {data.name}.")
@@ -749,7 +751,7 @@ def update_binding_constraint(
file_study = self.storage_service.get_storage(study).get_raw(study)
existing_constraint = self.get_binding_constraint(study, binding_constraint_id)
- study_version = int(study.version)
+ study_version = StudyVersion.parse(study.version)
check_attributes_coherence(data, study_version, data.operator or existing_constraint.operator)
upd_constraint = {
@@ -784,9 +786,9 @@ def update_binding_constraint(
upd_constraint["type"] = upd_constraint.get("time_step", existing_constraint.time_step)
upd_constraint["terms"] = data.terms or existing_constraint.terms
new_fields = ["enabled", "operator", "comments", "terms"]
- if study_version >= 830:
+ if study_version >= STUDY_VERSION_8_3:
new_fields.extend(["filter_year_by_year", "filter_synthesis"])
- if study_version >= 870:
+ if study_version >= STUDY_VERSION_8_7:
new_fields.append("group")
for field in new_fields:
if field not in upd_constraint:
@@ -925,9 +927,9 @@ def get_table_schema() -> JSON:
def _replace_matrices_according_to_frequency_and_version(
- data: ConstraintInput, version: int, args: t.Dict[str, t.Any]
+ data: ConstraintInput, version: StudyVersion, args: t.Dict[str, t.Any]
) -> t.Dict[str, t.Any]:
- if version < 870:
+ if version < STUDY_VERSION_8_7:
if "values" not in args:
matrix = {
BindingConstraintFrequency.HOURLY.value: default_bc_hourly_86,
@@ -949,10 +951,10 @@ def _replace_matrices_according_to_frequency_and_version(
def check_attributes_coherence(
data: t.Union[ConstraintCreation, ConstraintInput],
- study_version: int,
+ study_version: StudyVersion,
operator: BindingConstraintOperator,
) -> None:
- if study_version < 870:
+ if study_version < STUDY_VERSION_8_7:
if data.group:
raise InvalidFieldForVersionError(
f"You cannot specify a group as your study version is older than v8.7: {data.group}"
diff --git a/antarest/study/business/district_manager.py b/antarest/study/business/district_manager.py
index c642d61531..38a760b905 100644
--- a/antarest/study/business/district_manager.py
+++ b/antarest/study/business/district_manager.py
@@ -12,9 +12,8 @@
from typing import List
-from pydantic import BaseModel
-
from antarest.core.exceptions import AreaNotFound, DistrictAlreadyExist, DistrictNotFound
+from antarest.core.serialization import AntaresBaseModel
from antarest.study.business.utils import execute_or_add_commands
from antarest.study.model import Study
from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id
@@ -24,7 +23,7 @@
from antarest.study.storage.variantstudy.model.command.update_district import UpdateDistrict
-class DistrictUpdateDTO(BaseModel):
+class DistrictUpdateDTO(AntaresBaseModel):
#: Indicates whether this district is used in the output (usually all
#: districts are visible, but the user can decide to hide some of them).
output: bool
diff --git a/antarest/study/business/general_management.py b/antarest/study/business/general_management.py
index 088f8708fe..2c1d80fdd6 100644
--- a/antarest/study/business/general_management.py
+++ b/antarest/study/business/general_management.py
@@ -17,7 +17,7 @@
from antarest.study.business.all_optional_meta import all_optional_model
from antarest.study.business.enum_ignore_case import EnumIgnoreCase
from antarest.study.business.utils import GENERAL_DATA_PATH, FieldInfo, FormFieldsBaseModel, execute_or_add_commands
-from antarest.study.model import Study
+from antarest.study.model import STUDY_VERSION_7_1, STUDY_VERSION_8, Study
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
from antarest.study.storage.storage_service import StudyStorageService
from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig
@@ -148,7 +148,6 @@ def day_fields_validation(cls, values: Union[Dict[str, Any], ValidationInfo]) ->
OUTPUT_PATH = f"{GENERAL_DATA_PATH}/{OUTPUT}"
BUILDING_MODE = "building_mode"
-
FIELDS_INFO: Dict[str, FieldInfo] = {
"mode": {
"path": f"{GENERAL_PATH}/mode",
@@ -201,17 +200,17 @@ def day_fields_validation(cls, values: Union[Dict[str, Any], ValidationInfo]) ->
"filtering": {
"path": f"{GENERAL_PATH}/filtering",
"default_value": False,
- "end_version": 710,
+ "end_version": STUDY_VERSION_7_1,
},
"geographic_trimming": {
"path": f"{GENERAL_PATH}/geographic-trimming",
"default_value": False,
- "start_version": 710,
+ "start_version": STUDY_VERSION_7_1,
},
"thematic_trimming": {
"path": f"{GENERAL_PATH}/thematic-trimming",
"default_value": False,
- "start_version": 710,
+ "start_version": STUDY_VERSION_7_1,
},
"simulation_synthesis": {
"path": f"{OUTPUT_PATH}/synthesis",
@@ -303,7 +302,7 @@ def __get_building_mode_update_cmds(
return [
UpdateConfig(
target=f"{GENERAL_PATH}/custom-scenario"
- if file_study.config.version >= 800
+ if file_study.config.version >= STUDY_VERSION_8
else f"{GENERAL_PATH}/custom-ts-numbers",
data=new_value == BuildingMode.CUSTOM,
command_context=cmd_context,
diff --git a/antarest/study/business/link_management.py b/antarest/study/business/link_management.py
index 65c8a415e5..54831ad8ac 100644
--- a/antarest/study/business/link_management.py
+++ b/antarest/study/business/link_management.py
@@ -12,10 +12,9 @@
import typing as t
-from pydantic import BaseModel
-
from antarest.core.exceptions import ConfigFileNotFound
from antarest.core.model import JSON
+from antarest.core.serialization import AntaresBaseModel
from antarest.study.business.all_optional_meta import all_optional_model, camel_case_model
from antarest.study.business.utils import execute_or_add_commands
from antarest.study.model import RawStudy
@@ -28,13 +27,13 @@
_ALL_LINKS_PATH = "input/links"
-class LinkUIDTO(BaseModel):
+class LinkUIDTO(AntaresBaseModel):
color: str
width: float
style: str
-class LinkInfoDTO(BaseModel):
+class LinkInfoDTO(AntaresBaseModel):
area1: str
area2: str
ui: t.Optional[LinkUIDTO] = None
diff --git a/antarest/study/business/optimization_management.py b/antarest/study/business/optimization_management.py
index cb599d12e2..a33236997b 100644
--- a/antarest/study/business/optimization_management.py
+++ b/antarest/study/business/optimization_management.py
@@ -132,7 +132,7 @@ def get_field_values(self, study: Study) -> OptimizationFormFields:
def get_value(field_info: FieldInfo) -> Any:
path = field_info["path"]
study_ver = file_study.config.version
- start_ver = cast(int, field_info.get("start_version", -1))
+ start_ver = cast(int, field_info.get("start_version", 0))
target_name = path.split("/")[-1]
is_in_version = start_ver <= study_ver
diff --git a/antarest/study/business/thematic_trimming_field_infos.py b/antarest/study/business/thematic_trimming_field_infos.py
index 06086005fd..9b2e0b06be 100644
--- a/antarest/study/business/thematic_trimming_field_infos.py
+++ b/antarest/study/business/thematic_trimming_field_infos.py
@@ -16,8 +16,11 @@
import typing as t
+from antares.study.version import StudyVersion
+
from antarest.study.business.all_optional_meta import all_optional_model
from antarest.study.business.utils import FormFieldsBaseModel
+from antarest.study.model import STUDY_VERSION_8_1, STUDY_VERSION_8_3, STUDY_VERSION_8_6, STUDY_VERSION_8_8
@all_optional_model
@@ -184,59 +187,63 @@ class ThematicTrimmingFormFields(FormFieldsBaseModel):
"cong_prob_minus": {"topic": _GENERAL, "path": "CONG. PROB -", "default_value": True},
"hurdle_cost": {"topic": _GENERAL, "path": "HURDLE COST", "default_value": True},
# since v8.1
- "res_generation_by_plant": {"topic": _GENERAL, "path": "RES generation by plant", "default_value": True, "start_version": 810},
- "misc_dtg_2": {"topic": _GENERAL, "path": "MISC. DTG 2", "default_value": True, "start_version": 810},
- "misc_dtg_3": {"topic": _GENERAL, "path": "MISC. DTG 3", "default_value": True, "start_version": 810},
- "misc_dtg_4": {"topic": _GENERAL, "path": "MISC. DTG 4", "default_value": True, "start_version": 810},
- "wind_offshore": {"topic": _GENERAL, "path": "WIND OFFSHORE", "default_value": True, "start_version": 810},
- "wind_onshore": {"topic": _GENERAL, "path": "WIND ONSHORE", "default_value": True, "start_version": 810},
- "solar_concrt": {"topic": _GENERAL, "path": "SOLAR CONCRT.", "default_value": True, "start_version": 810},
- "solar_pv": {"topic": _GENERAL, "path": "SOLAR PV", "default_value": True, "start_version": 810},
- "solar_rooft": {"topic": _GENERAL, "path": "SOLAR ROOFT", "default_value": True, "start_version": 810},
- "renw_1": {"topic": _GENERAL, "path": "RENW. 1", "default_value": True, "start_version": 810},
- "renw_2": {"topic": _GENERAL, "path": "RENW. 2", "default_value": True, "start_version": 810},
- "renw_3": {"topic": _GENERAL, "path": "RENW. 3", "default_value": True, "start_version": 810},
- "renw_4": {"topic": _GENERAL, "path": "RENW. 4", "default_value": True, "start_version": 810},
+ "res_generation_by_plant": {"topic": _GENERAL, "path": "RES generation by plant", "default_value": True, "start_version": STUDY_VERSION_8_1},
+ "misc_dtg_2": {"topic": _GENERAL, "path": "MISC. DTG 2", "default_value": True, "start_version": STUDY_VERSION_8_1},
+ "misc_dtg_3": {"topic": _GENERAL, "path": "MISC. DTG 3", "default_value": True, "start_version": STUDY_VERSION_8_1},
+ "misc_dtg_4": {"topic": _GENERAL, "path": "MISC. DTG 4", "default_value": True, "start_version": STUDY_VERSION_8_1},
+ "wind_offshore": {"topic": _GENERAL, "path": "WIND OFFSHORE", "default_value": True, "start_version": STUDY_VERSION_8_1},
+ "wind_onshore": {"topic": _GENERAL, "path": "WIND ONSHORE", "default_value": True, "start_version": STUDY_VERSION_8_1},
+ "solar_concrt": {"topic": _GENERAL, "path": "SOLAR CONCRT.", "default_value": True, "start_version": STUDY_VERSION_8_1},
+ "solar_pv": {"topic": _GENERAL, "path": "SOLAR PV", "default_value": True, "start_version": STUDY_VERSION_8_1},
+ "solar_rooft": {"topic": _GENERAL, "path": "SOLAR ROOFT", "default_value": True, "start_version": STUDY_VERSION_8_1},
+ "renw_1": {"topic": _GENERAL, "path": "RENW. 1", "default_value": True, "start_version": STUDY_VERSION_8_1},
+ "renw_2": {"topic": _GENERAL, "path": "RENW. 2", "default_value": True, "start_version": STUDY_VERSION_8_1},
+ "renw_3": {"topic": _GENERAL, "path": "RENW. 3", "default_value": True, "start_version": STUDY_VERSION_8_1},
+ "renw_4": {"topic": _GENERAL, "path": "RENW. 4", "default_value": True, "start_version": STUDY_VERSION_8_1},
# since v8.3
- "dens": {"topic": _GENERAL, "path": "DENS", "default_value": True, "start_version": 830},
- "profit_by_plant": {"topic": _GENERAL, "path": "Profit by plant", "default_value": True, "start_version": 830},
+ "dens": {"topic": _GENERAL, "path": "DENS", "default_value": True, "start_version": STUDY_VERSION_8_3},
+ "profit_by_plant": {"topic": _GENERAL, "path": "Profit by plant", "default_value": True, "start_version": STUDY_VERSION_8_3},
# topic: "Short-Term Storages"
# since v8.6
- "sts_inj_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS inj by plant", "default_value": True, "start_version": 860},
- "sts_withdrawal_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS withdrawal by plant", "default_value": True, "start_version": 860},
- "sts_lvl_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS lvl by plant", "default_value": True, "start_version": 860},
- "sts_cashflow_by_cluster": {"topic": _SHORT_TERM_STORAGES, "path": "STS Cashflow By Cluster", "default_value": True, "start_version": 880},
+ "sts_inj_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS inj by plant", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "sts_withdrawal_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS withdrawal by plant", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "sts_lvl_by_plant": {"topic": _SHORT_TERM_STORAGES, "path": "STS lvl by plant", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "sts_cashflow_by_cluster": {"topic": _SHORT_TERM_STORAGES, "path": "STS Cashflow By Cluster", "default_value": True, "start_version": STUDY_VERSION_8_8},
# topic: "Short-Term Storages - Group"
- "psp_open_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_open_injection", "default_value": True, "start_version": 860},
- "psp_open_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_open_withdrawal", "default_value": True, "start_version": 860},
- "psp_open_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_open_level", "default_value": True, "start_version": 860},
- "psp_closed_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_closed_injection", "default_value": True, "start_version": 860},
- "psp_closed_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_closed_withdrawal", "default_value": True, "start_version": 860},
- "psp_closed_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_closed_level", "default_value": True, "start_version": 860},
- "pondage_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Pondage_injection", "default_value": True, "start_version": 860},
- "pondage_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Pondage_withdrawal", "default_value": True, "start_version": 860},
- "pondage_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Pondage_level", "default_value": True, "start_version": 860},
- "battery_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Battery_injection", "default_value": True, "start_version": 860},
- "battery_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Battery_withdrawal", "default_value": True, "start_version": 860},
- "battery_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Battery_level", "default_value": True, "start_version": 860},
- "other1_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other1_injection", "default_value": True, "start_version": 860},
- "other1_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other1_withdrawal", "default_value": True, "start_version": 860},
- "other1_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other1_level", "default_value": True, "start_version": 860},
- "other2_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other2_injection", "default_value": True, "start_version": 860},
- "other2_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other2_withdrawal", "default_value": True, "start_version": 860},
- "other2_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other2_level", "default_value": True, "start_version": 860},
- "other3_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other3_injection", "default_value": True, "start_version": 860},
- "other3_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other3_withdrawal", "default_value": True, "start_version": 860},
- "other3_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other3_level", "default_value": True, "start_version": 860},
- "other4_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other4_injection", "default_value": True, "start_version": 860},
- "other4_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other4_withdrawal", "default_value": True, "start_version": 860},
- "other4_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other4_level", "default_value": True, "start_version": 860},
- "other5_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other5_injection", "default_value": True, "start_version": 860},
- "other5_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other5_withdrawal", "default_value": True, "start_version": 860},
- "other5_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other5_level", "default_value": True, "start_version": 860},
+ "psp_open_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_open_injection", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "psp_open_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_open_withdrawal", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "psp_open_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_open_level", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "psp_closed_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_closed_injection", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "psp_closed_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_closed_withdrawal", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "psp_closed_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "PSP_closed_level", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "pondage_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Pondage_injection", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "pondage_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Pondage_withdrawal", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "pondage_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Pondage_level", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "battery_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Battery_injection", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "battery_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Battery_withdrawal", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "battery_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Battery_level", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other1_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other1_injection", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other1_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other1_withdrawal", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other1_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other1_level", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other2_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other2_injection", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other2_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other2_withdrawal", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other2_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other2_level", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other3_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other3_injection", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other3_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other3_withdrawal", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other3_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other3_level", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other4_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other4_injection", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other4_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other4_withdrawal", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other4_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other4_level", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other5_injection": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other5_injection", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other5_withdrawal": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other5_withdrawal", "default_value": True, "start_version": STUDY_VERSION_8_6},
+ "other5_level": {"topic": _SHORT_TERM_STORAGES_GROUP, "path": "Other5_level", "default_value": True, "start_version": STUDY_VERSION_8_6},
# fmt: on
}
-def get_fields_info(study_version: int) -> t.Mapping[str, t.Mapping[str, t.Any]]:
- return {key: info for key, info in FIELDS_INFO.items() if (info.get("start_version") or 0) <= study_version}
+def get_fields_info(study_version: StudyVersion) -> t.Mapping[str, t.Mapping[str, t.Any]]:
+ return {
+ key: info
+ for key, info in FIELDS_INFO.items()
+ if (info.get("start_version") or StudyVersion.parse(0)) <= study_version
+ }
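A short usage sketch of the reworked get_fields_info: the toy FIELDS_INFO below is illustrative and not the real thematic-trimming table, but the filtering logic is the one from the hunk above (entries without a "start_version" are kept for every version).

    import typing as t

    from antares.study.version import StudyVersion

    STUDY_VERSION_8_1 = StudyVersion.parse("8.1")
    STUDY_VERSION_8_6 = StudyVersion.parse("8.6")

    # Illustrative subset of the real table.
    FIELDS_INFO: t.Dict[str, t.Dict[str, t.Any]] = {
        "hurdle_cost": {"default_value": True},
        "res_generation_by_plant": {"default_value": True, "start_version": STUDY_VERSION_8_1},
        "sts_inj_by_plant": {"default_value": True, "start_version": STUDY_VERSION_8_6},
    }

    def get_fields_info(study_version: StudyVersion) -> t.Mapping[str, t.Mapping[str, t.Any]]:
        return {
            key: info
            for key, info in FIELDS_INFO.items()
            if (info.get("start_version") or StudyVersion.parse(0)) <= study_version
        }

    print(sorted(get_fields_info(StudyVersion.parse("8.2"))))
    # ['hurdle_cost', 'res_generation_by_plant']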
diff --git a/antarest/study/business/thematic_trimming_management.py b/antarest/study/business/thematic_trimming_management.py
index 74325215dd..d0ada30b65 100644
--- a/antarest/study/business/thematic_trimming_management.py
+++ b/antarest/study/business/thematic_trimming_management.py
@@ -12,6 +12,8 @@
import typing as t
+from antares.study.version import StudyVersion
+
from antarest.study.business.thematic_trimming_field_infos import ThematicTrimmingFormFields, get_fields_info
from antarest.study.business.utils import GENERAL_DATA_PATH, execute_or_add_commands
from antarest.study.model import Study
@@ -34,13 +36,13 @@ def get_field_values(self, study: Study) -> ThematicTrimmingFormFields:
include_vars = trimming_config.get("select_var +") or []
selected_vars_reset = trimming_config.get("selected_vars_reset", True)
- def get_value(field_info: t.Mapping[str, t.Any]) -> t.Any:
+ def get_value(field_info: t.Mapping[str, t.Any]) -> bool:
if selected_vars_reset is None:
- return field_info["default_value"]
+ return t.cast(bool, field_info["default_value"])
var_name = field_info["path"]
return var_name not in exclude_vars if selected_vars_reset else var_name in include_vars
- fields_info = get_fields_info(int(study.version))
+ fields_info = get_fields_info(StudyVersion.parse(study.version))
fields_values = {name: get_value(info) for name, info in fields_info.items()}
return ThematicTrimmingFormFields(**fields_values)
@@ -52,7 +54,7 @@ def set_field_values(self, study: Study, field_values: ThematicTrimmingFormField
field_values_dict = field_values.model_dump(mode="json")
keys_by_bool: t.Dict[bool, t.List[t.Any]] = {True: [], False: []}
- fields_info = get_fields_info(int(study.version))
+ fields_info = get_fields_info(StudyVersion.parse(study.version))
for name, info in fields_info.items():
keys_by_bool[field_values_dict[name]].append(info["path"])
diff --git a/antarest/study/business/timeseries_config_management.py b/antarest/study/business/timeseries_config_management.py
index 20f389a6aa..8b01bfe24e 100644
--- a/antarest/study/business/timeseries_config_management.py
+++ b/antarest/study/business/timeseries_config_management.py
@@ -18,7 +18,7 @@
from antarest.study.business.all_optional_meta import all_optional_model
from antarest.study.business.enum_ignore_case import EnumIgnoreCase
from antarest.study.business.utils import GENERAL_DATA_PATH, FormFieldsBaseModel, execute_or_add_commands
-from antarest.study.model import Study
+from antarest.study.model import STUDY_VERSION_8_1, STUDY_VERSION_8_2, Study
from antarest.study.storage.rawstudy.model.filesystem.config.model import EnrModelling
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
from antarest.study.storage.storage_service import StudyStorageService
@@ -210,7 +210,9 @@ def __get_form_fields_for_type(
config = file_study.config
study_version = config.version
- has_renewables = config.version >= 810 and EnrModelling(config.enr_modelling) == EnrModelling.CLUSTERS
+ has_renewables = (
+ study_version >= STUDY_VERSION_8_1 and EnrModelling(config.enr_modelling) == EnrModelling.CLUSTERS
+ )
if ts_type == TSType.RENEWABLES and not has_renewables:
return None
@@ -218,7 +220,7 @@ def __get_form_fields_for_type(
if ts_type in [TSType.WIND, TSType.SOLAR] and has_renewables:
return None
- if ts_type == TSType.NTC and study_version < 820:
+ if ts_type == TSType.NTC and study_version < STUDY_VERSION_8_2:
return None
is_special_type = ts_type == TSType.RENEWABLES or ts_type == TSType.NTC
diff --git a/antarest/study/business/utils.py b/antarest/study/business/utils.py
index 1afacecc04..58682fd596 100644
--- a/antarest/study/business/utils.py
+++ b/antarest/study/business/utils.py
@@ -12,6 +12,7 @@
import typing as t
+from antares.study.version import StudyVersion
from pydantic import BaseModel
from antarest.core.exceptions import CommandApplicationError
@@ -84,8 +85,8 @@ class FormFieldsBaseModel(
class FieldInfo(t.TypedDict, total=False):
path: str
default_value: t.Any
- start_version: t.Optional[int]
- end_version: t.Optional[int]
+ start_version: t.Optional[StudyVersion]
+ end_version: t.Optional[StudyVersion]
# Workaround to replace Pydantic computed values which are ignored by FastAPI.
# TODO: check @computed_field available in Pydantic v2 to remove it
# (value) -> encoded_value
diff --git a/antarest/study/business/xpansion_management.py b/antarest/study/business/xpansion_management.py
index f34b7f3497..02e1fc795c 100644
--- a/antarest/study/business/xpansion_management.py
+++ b/antarest/study/business/xpansion_management.py
@@ -19,10 +19,11 @@
import zipfile
from fastapi import HTTPException, UploadFile
-from pydantic import BaseModel, Field, ValidationError, field_validator, model_validator
+from pydantic import Field, ValidationError, field_validator, model_validator
from antarest.core.exceptions import BadZipBinary, ChildNotFoundError
from antarest.core.model import JSON
+from antarest.core.serialization import AntaresBaseModel
from antarest.study.business.all_optional_meta import all_optional_model
from antarest.study.business.enum_ignore_case import EnumIgnoreCase
from antarest.study.model import Study
@@ -55,7 +56,7 @@ class Solver(EnumIgnoreCase):
XPRESS = "Xpress"
-class XpansionSensitivitySettings(BaseModel):
+class XpansionSensitivitySettings(AntaresBaseModel):
"""
A DTO representing the sensitivity analysis settings used for Xpansion.
@@ -76,7 +77,7 @@ def projection_validation(cls, v: t.Optional[t.Sequence[str]]) -> t.Sequence[str
return [] if v is None else v
-class XpansionSettings(BaseModel, extra="ignore", validate_assignment=True, populate_by_name=True):
+class XpansionSettings(AntaresBaseModel, extra="ignore", validate_assignment=True, populate_by_name=True):
"""
A data transfer object representing the general settings used for Xpansion.
@@ -230,7 +231,7 @@ class UpdateXpansionSettings(XpansionSettings):
)
-class XpansionCandidateDTO(BaseModel):
+class XpansionCandidateDTO(AntaresBaseModel):
# The id of the candidate is irrelevant, so it should stay hidden for the user
# The names should be the section titles of the file, and the id should be removed
name: str
diff --git a/antarest/study/model.py b/antarest/study/model.py
index b7e7435f6f..aec84fcc7d 100644
--- a/antarest/study/model.py
+++ b/antarest/study/model.py
@@ -18,7 +18,8 @@
from datetime import datetime, timedelta
from pathlib import Path
-from pydantic import BaseModel, field_validator
+from antares.study.version import StudyVersion
+from pydantic import field_serializer, field_validator
from sqlalchemy import ( # type: ignore
Boolean,
Column,
@@ -34,6 +35,7 @@
from antarest.core.exceptions import ShouldNotHappenException
from antarest.core.model import PublicMode
from antarest.core.persistence import Base
+from antarest.core.serialization import AntaresBaseModel
from antarest.login.model import Group, GroupDTO, Identity
from antarest.study.css4_colors import COLOR_NAMES
@@ -43,26 +45,44 @@
DEFAULT_WORKSPACE_NAME = "default"
-STUDY_REFERENCE_TEMPLATES: t.Mapping[str, str] = {
- "600": "empty_study_613.zip",
- "610": "empty_study_613.zip",
- "640": "empty_study_613.zip",
- "700": "empty_study_700.zip",
- "710": "empty_study_710.zip",
- "720": "empty_study_720.zip",
- "800": "empty_study_803.zip",
- "810": "empty_study_810.zip",
- "820": "empty_study_820.zip",
- "830": "empty_study_830.zip",
- "840": "empty_study_840.zip",
- "850": "empty_study_850.zip",
- "860": "empty_study_860.zip",
- "870": "empty_study_870.zip",
- "880": "empty_study_880.zip",
+NEW_DEFAULT_STUDY_VERSION: StudyVersion = StudyVersion.parse("8.8")
+STUDY_VERSION_6_0 = StudyVersion.parse("6.0")
+STUDY_VERSION_6_1 = StudyVersion.parse("6.1")
+STUDY_VERSION_6_4 = StudyVersion.parse("6.4")
+STUDY_VERSION_6_5 = StudyVersion.parse("6.5")
+STUDY_VERSION_7_0 = StudyVersion.parse("7.0")
+STUDY_VERSION_7_1 = StudyVersion.parse("7.1")
+STUDY_VERSION_7_2 = StudyVersion.parse("7.2")
+STUDY_VERSION_8 = StudyVersion.parse("8.0")
+STUDY_VERSION_8_1 = StudyVersion.parse("8.1")
+STUDY_VERSION_8_2 = StudyVersion.parse("8.2")
+STUDY_VERSION_8_3 = StudyVersion.parse("8.3")
+STUDY_VERSION_8_4 = StudyVersion.parse("8.4")
+STUDY_VERSION_8_5 = StudyVersion.parse("8.5")
+STUDY_VERSION_8_6 = StudyVersion.parse("8.6")
+STUDY_VERSION_8_7 = StudyVersion.parse("8.7")
+STUDY_VERSION_8_8 = NEW_DEFAULT_STUDY_VERSION
+STUDY_VERSION_9_1 = StudyVersion.parse("9.1")
+STUDY_VERSION_9_2 = StudyVersion.parse("9.2")
+
+STUDY_REFERENCE_TEMPLATES: t.Mapping[StudyVersion, str] = {
+ STUDY_VERSION_6_0: "empty_study_613.zip",
+ STUDY_VERSION_6_1: "empty_study_613.zip",
+ STUDY_VERSION_6_4: "empty_study_613.zip",
+ STUDY_VERSION_7_0: "empty_study_700.zip",
+ STUDY_VERSION_7_1: "empty_study_710.zip",
+ STUDY_VERSION_7_2: "empty_study_720.zip",
+ STUDY_VERSION_8: "empty_study_803.zip",
+ STUDY_VERSION_8_1: "empty_study_810.zip",
+ STUDY_VERSION_8_2: "empty_study_820.zip",
+ STUDY_VERSION_8_3: "empty_study_830.zip",
+ STUDY_VERSION_8_4: "empty_study_840.zip",
+ STUDY_VERSION_8_5: "empty_study_850.zip",
+ STUDY_VERSION_8_6: "empty_study_860.zip",
+ STUDY_VERSION_8_7: "empty_study_870.zip",
+ STUDY_VERSION_8_8: "empty_study_880.zip",
}
-NEW_DEFAULT_STUDY_VERSION: str = "880"
-
class StudyGroup(Base): # type:ignore
"""
@@ -150,7 +170,7 @@ class StudyContentStatus(enum.Enum):
ERROR = "ERROR"
-class CommentsDto(BaseModel):
+class CommentsDto(AntaresBaseModel):
comments: str
@@ -299,7 +319,7 @@ class StudyFolder:
groups: t.List[Group]
-class PatchStudy(BaseModel):
+class PatchStudy(AntaresBaseModel):
scenario: t.Optional[str] = None
doc: t.Optional[str] = None
status: t.Optional[str] = None
@@ -307,12 +327,12 @@ class PatchStudy(BaseModel):
tags: t.List[str] = []
-class PatchArea(BaseModel):
+class PatchArea(AntaresBaseModel):
country: t.Optional[str] = None
tags: t.List[str] = []
-class PatchCluster(BaseModel):
+class PatchCluster(AntaresBaseModel):
type: t.Optional[str] = None
code_oi: t.Optional[str] = None
@@ -322,26 +342,26 @@ def alias_generator(cls, string: str) -> str:
return "-".join(string.split("_"))
-class PatchOutputs(BaseModel):
+class PatchOutputs(AntaresBaseModel):
reference: t.Optional[str] = None
-class Patch(BaseModel):
+class Patch(AntaresBaseModel):
study: t.Optional[PatchStudy] = None
areas: t.Optional[t.Dict[str, PatchArea]] = None
thermal_clusters: t.Optional[t.Dict[str, PatchCluster]] = None
outputs: t.Optional[PatchOutputs] = None
-class OwnerInfo(BaseModel):
+class OwnerInfo(AntaresBaseModel):
id: t.Optional[int] = None
name: str
-class StudyMetadataDTO(BaseModel):
+class StudyMetadataDTO(AntaresBaseModel):
id: str
name: str
- version: int
+ version: StudyVersion
created: str
updated: str
type: str
@@ -358,13 +378,21 @@ class StudyMetadataDTO(BaseModel):
folder: t.Optional[str] = None
tags: t.List[str] = []
+ @field_serializer("version")
+ def serialize_version(self, version: StudyVersion) -> int:
+        return int(version)

+
@field_validator("horizon", mode="before")
def transform_horizon_to_str(cls, val: t.Union[str, int, None]) -> t.Optional[str]:
# horizon can be an int.
return str(val) if val else val # type: ignore
+ @field_validator("version", mode="before")
+ def _validate_version(cls, v: t.Any) -> StudyVersion:
+ return StudyVersion.parse(v)
+
-class StudyMetadataPatchDTO(BaseModel):
+class StudyMetadataPatchDTO(AntaresBaseModel):
name: t.Optional[str] = None
author: t.Optional[str] = None
horizon: t.Optional[str] = None
@@ -387,7 +415,7 @@ def _normalize_tags(cls, v: t.List[str]) -> t.List[str]:
return tags
-class StudySimSettingsDTO(BaseModel):
+class StudySimSettingsDTO(AntaresBaseModel):
general: t.Dict[str, t.Any]
input: t.Dict[str, t.Any]
output: t.Dict[str, t.Any]
@@ -398,7 +426,7 @@ class StudySimSettingsDTO(BaseModel):
playlist: t.Optional[t.List[int]] = None
-class StudySimResultDTO(BaseModel):
+class StudySimResultDTO(AntaresBaseModel):
name: str
type: str
settings: StudySimSettingsDTO
@@ -478,7 +506,7 @@ def suffix(self) -> str:
return mapping[self]
-class StudyDownloadDTO(BaseModel):
+class StudyDownloadDTO(AntaresBaseModel):
"""
DTO used to download outputs
"""
@@ -494,32 +522,32 @@ class StudyDownloadDTO(BaseModel):
includeClusters: bool = False
-class MatrixIndex(BaseModel):
+class MatrixIndex(AntaresBaseModel):
start_date: str = ""
steps: int = 8760
first_week_size: int = 7
level: StudyDownloadLevelDTO = StudyDownloadLevelDTO.HOURLY
-class TimeSerie(BaseModel):
+class TimeSerie(AntaresBaseModel):
name: str
unit: str
data: t.List[t.Optional[float]] = []
-class TimeSeriesData(BaseModel):
+class TimeSeriesData(AntaresBaseModel):
type: StudyDownloadType
name: str
data: t.Dict[str, t.List[TimeSerie]] = {}
-class MatrixAggregationResultDTO(BaseModel):
+class MatrixAggregationResultDTO(AntaresBaseModel):
index: MatrixIndex
data: t.List[TimeSeriesData]
warnings: t.List[str]
-class MatrixAggregationResult(BaseModel):
+class MatrixAggregationResult(AntaresBaseModel):
index: MatrixIndex
data: t.Dict[t.Tuple[StudyDownloadType, str], t.Dict[str, t.List[TimeSerie]]]
warnings: t.List[str]
@@ -539,6 +567,6 @@ def to_dto(self) -> MatrixAggregationResultDTO:
)
-class ReferenceStudy(BaseModel):
+class ReferenceStudy(AntaresBaseModel):
version: str
template_name: str
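The study model now exposes StudyVersion constants, keys STUDY_REFERENCE_TEMPLATES by StudyVersion and validates/serializes StudyMetadataDTO.version through the pair of hooks above, while the database and the API keep their historical string/integer forms. A hedged round-trip sketch, assuming the parsing and ":ddd" formatting behaviour relied on elsewhere in this patch:

    from antares.study.version import StudyVersion

    NEW_DEFAULT_STUDY_VERSION = StudyVersion.parse("8.8")

    # Legacy 3-digit strings stored in the DB parse to the same version object.
    v = StudyVersion.parse("880")
    print(v == NEW_DEFAULT_STUDY_VERSION)  # True
    print(f"{v:ddd}")                      # "880", the format used in service.py below
    print(int(v))                          # 880, what StudyMetadataDTO.serialize_version emits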
diff --git a/antarest/study/repository.py b/antarest/study/repository.py
index 3b21511e0b..6ecad45df7 100644
--- a/antarest/study/repository.py
+++ b/antarest/study/repository.py
@@ -14,7 +14,7 @@
import enum
import typing as t
-from pydantic import BaseModel, NonNegativeInt
+from pydantic import NonNegativeInt
from sqlalchemy import and_, func, not_, or_, sql # type: ignore
from sqlalchemy.orm import Query, Session, joinedload, with_polymorphic # type: ignore
@@ -22,6 +22,7 @@
from antarest.core.jwt import JWTUser
from antarest.core.model import PublicMode
from antarest.core.requests import RequestParameters
+from antarest.core.serialization import AntaresBaseModel
from antarest.core.utils.fastapi_sqlalchemy import db
from antarest.login.model import Group
from antarest.study.model import DEFAULT_WORKSPACE_NAME, RawStudy, Study, StudyAdditionalData, Tag
@@ -47,7 +48,7 @@ def escape_like(string: str, escape_char: str = "\\") -> str:
return string.replace(escape_char, escape_char * 2).replace("%", escape_char + "%").replace("_", escape_char + "_")
-class AccessPermissions(BaseModel, frozen=True, extra="forbid"):
+class AccessPermissions(AntaresBaseModel, frozen=True, extra="forbid"):
"""
This class object is build to pass on the user identity and its associated groups information
into the listing function get_all below
@@ -84,7 +85,7 @@ def from_params(cls, params: t.Union[RequestParameters, JWTUser]) -> "AccessPerm
return cls()
-class StudyFilter(BaseModel, frozen=True, extra="forbid"):
+class StudyFilter(AntaresBaseModel, frozen=True, extra="forbid"):
"""Study filter class gathering the main filtering parameters
Attributes:
@@ -127,7 +128,7 @@ class StudySortBy(enum.StrEnum):
DATE_DESC = "-date"
-class StudyPagination(BaseModel, frozen=True, extra="forbid"):
+class StudyPagination(AntaresBaseModel, frozen=True, extra="forbid"):
"""
Pagination of a studies query results
diff --git a/antarest/study/service.py b/antarest/study/service.py
index 65e3c474e4..016c2fbee4 100644
--- a/antarest/study/service.py
+++ b/antarest/study/service.py
@@ -744,7 +744,7 @@ def create_study(
path=str(study_path),
created_at=datetime.utcnow(),
updated_at=datetime.utcnow(),
- version=version or NEW_DEFAULT_STUDY_VERSION,
+ version=version or f"{NEW_DEFAULT_STUDY_VERSION:ddd}",
additional_data=StudyAdditionalData(author=author),
)
@@ -2154,7 +2154,7 @@ def _analyse_study(self, metadata: Study) -> StudyContentStatus:
# noinspection PyUnusedLocal
@staticmethod
def get_studies_versions(params: RequestParameters) -> t.List[str]:
- return list(STUDY_REFERENCE_TEMPLATES)
+ return [f"{v:ddd}" for v in STUDY_REFERENCE_TEMPLATES]
def create_xpansion_configuration(
self,
diff --git a/antarest/study/storage/abstract_storage_service.py b/antarest/study/storage/abstract_storage_service.py
index 967ece3ca6..d8d8993e2e 100644
--- a/antarest/study/storage/abstract_storage_service.py
+++ b/antarest/study/storage/abstract_storage_service.py
@@ -109,7 +109,7 @@ def get_study_information(
return StudyMetadataDTO(
id=study.id,
name=study.name,
- version=int(study.version),
+ version=study.version,
created=str(study.created_at),
updated=str(study.updated_at),
workspace=study_workspace,
diff --git a/antarest/study/storage/matrix_profile.py b/antarest/study/storage/matrix_profile.py
index cd3780ae32..40dc42a6b0 100644
--- a/antarest/study/storage/matrix_profile.py
+++ b/antarest/study/storage/matrix_profile.py
@@ -18,6 +18,8 @@
import pandas as pd
+from antarest.study.model import STUDY_VERSION_8_2, STUDY_VERSION_8_6, STUDY_VERSION_8_7
+
class _MatrixProfile(t.NamedTuple):
"""
@@ -144,10 +146,10 @@ def _process_links_columns(self, matrix_path: str) -> t.Sequence[str]:
),
}
-_SPECIFIC_MATRICES_820 = copy.deepcopy(_SPECIFIC_MATRICES)
+_SPECIFIC_MATRICES_8_2 = copy.deepcopy(_SPECIFIC_MATRICES)
"""Specific matrices for study version 8.2."""
-_SPECIFIC_MATRICES_820["input/links/*/*"] = _MatrixProfile(
+_SPECIFIC_MATRICES_8_2["input/links/*/*"] = _MatrixProfile(
cols=[
"Hurdle costs direct",
"Hurdle costs indirect",
@@ -160,19 +162,19 @@ def _process_links_columns(self, matrix_path: str) -> t.Sequence[str]:
)
# Specific matrices for study version 8.6
-_SPECIFIC_MATRICES_860 = copy.deepcopy(_SPECIFIC_MATRICES_820)
+_SPECIFIC_MATRICES_8_6 = copy.deepcopy(_SPECIFIC_MATRICES_8_2)
"""Specific matrices for study version 8.6."""
# noinspection SpellCheckingInspection
#
-_SPECIFIC_MATRICES_860["input/hydro/series/*/mingen"] = _MatrixProfile(cols=[], rows=[])
+_SPECIFIC_MATRICES_8_6["input/hydro/series/*/mingen"] = _MatrixProfile(cols=[], rows=[])
-_SPECIFIC_MATRICES_870 = copy.deepcopy(_SPECIFIC_MATRICES_820)
+_SPECIFIC_MATRICES_8_7 = copy.deepcopy(_SPECIFIC_MATRICES_8_2)
"""Specific matrices for study version 8.7."""
# noinspection SpellCheckingInspection
# Scenarized RHS for binding constraints
-_SPECIFIC_MATRICES_870["input/bindingconstraints/*"] = _MatrixProfile(cols=[], rows=[])
+_SPECIFIC_MATRICES_8_7["input/bindingconstraints/*"] = _MatrixProfile(cols=[], rows=[])
def adjust_matrix_columns_index(
@@ -191,14 +193,14 @@ def adjust_matrix_columns_index(
study_version: The version of the study.
"""
# Get the matrix profiles for a given study version
- if study_version < 820:
+ if study_version < STUDY_VERSION_8_2:
matrix_profiles = _SPECIFIC_MATRICES
- elif study_version < 860:
- matrix_profiles = _SPECIFIC_MATRICES_820
- elif study_version < 870:
- matrix_profiles = _SPECIFIC_MATRICES_860
+ elif study_version < STUDY_VERSION_8_6:
+ matrix_profiles = _SPECIFIC_MATRICES_8_2
+ elif study_version < STUDY_VERSION_8_7:
+ matrix_profiles = _SPECIFIC_MATRICES_8_6
else:
- matrix_profiles = _SPECIFIC_MATRICES_870
+ matrix_profiles = _SPECIFIC_MATRICES_8_7
# Apply the matrix profile to the dataframe to adjust the column names and index
for pattern, matrix_profile in matrix_profiles.items():
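The renamed profile tables keep the same half-open version ranges as before, only expressed with StudyVersion objects. A compact sketch of the bucketing; profile_table_name is an illustrative stand-in that returns names instead of the real _MatrixProfile dictionaries:

    from antares.study.version import StudyVersion

    STUDY_VERSION_8_2 = StudyVersion.parse("8.2")
    STUDY_VERSION_8_6 = StudyVersion.parse("8.6")
    STUDY_VERSION_8_7 = StudyVersion.parse("8.7")

    def profile_table_name(study_version: StudyVersion) -> str:
        # Mirrors the lookup in adjust_matrix_columns_index, names only.
        if study_version < STUDY_VERSION_8_2:
            return "_SPECIFIC_MATRICES"
        elif study_version < STUDY_VERSION_8_6:
            return "_SPECIFIC_MATRICES_8_2"
        elif study_version < STUDY_VERSION_8_7:
            return "_SPECIFIC_MATRICES_8_6"
        return "_SPECIFIC_MATRICES_8_7"

    print(profile_table_name(StudyVersion.parse("8.6")))  # _SPECIFIC_MATRICES_8_6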
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/cluster.py b/antarest/study/storage/rawstudy/model/filesystem/config/cluster.py
index 62393794e2..f2a6349d90 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/cluster.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/cluster.py
@@ -19,12 +19,14 @@
import functools
import typing as t
-from pydantic import BaseModel, Field
+from pydantic import Field
+
+from antarest.core.serialization import AntaresBaseModel
@functools.total_ordering
class ItemProperties(
- BaseModel,
+ AntaresBaseModel,
extra="forbid",
validate_assignment=True,
populate_by_name=True,
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/files.py b/antarest/study/storage/rawstudy/model/filesystem/config/files.py
index 6cfed5b9bc..73e67b1976 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/files.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/files.py
@@ -20,8 +20,11 @@
from enum import Enum
from pathlib import Path
+from antares.study.version import StudyVersion
+
from antarest.core.model import JSON
from antarest.core.serialization import from_json
+from antarest.study.model import STUDY_VERSION_8_1, STUDY_VERSION_8_6
from antarest.study.storage.rawstudy.ini_reader import IniReader
from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
DEFAULT_GROUP,
@@ -191,14 +194,16 @@ def _extract_data_from_file(
raise NotImplementedError(file_type)
-def _parse_version(path: Path) -> int:
+def _parse_version(path: Path) -> StudyVersion:
study_info = _extract_data_from_file(
root=path,
inside_root_path=Path("study.antares"),
file_type=FileType.SIMPLE_INI,
)
- version: int = study_info.get("antares", {}).get("version", -1)
- return version
+ version = study_info.get("antares", {}).get("version", 0)
+ if isinstance(version, float): # study 9.0 or newer
+ version = str(version)
+ return StudyVersion.parse(version)
def _parse_parameters(path: Path) -> t.Tuple[bool, t.List[str], str]:
@@ -467,7 +472,7 @@ def _parse_renewables(root: Path, area: str) -> t.List[RenewableConfigType]:
# Before version 8.1, we only have "Load", "Wind" and "Solar" objects.
# We can't use renewable clusters.
version = _parse_version(root)
- if version < 810:
+ if version < STUDY_VERSION_8_1:
return []
# Since version 8.1 of the solver, we can use "renewable clusters" objects.
@@ -494,7 +499,7 @@ def _parse_st_storage(root: Path, area: str) -> t.List[STStorageConfigType]:
# st_storage feature exists only since 8.6 version
version = _parse_version(root)
- if version < 860:
+ if version < STUDY_VERSION_8_6:
return []
relpath = Path(f"input/st-storage/clusters/{area}/list.ini")
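_parse_version now normalizes whatever study.antares contains: older templates store an integer (e.g. 880), studies 9.0 and newer store a float (e.g. 9.0) which is converted to a string before being handed to StudyVersion.parse. A hedged sketch of the same normalization outside the tree parser; parse_raw_version is a hypothetical name:

    import typing as t

    from antares.study.version import StudyVersion

    def parse_raw_version(raw: t.Union[int, float, str]) -> StudyVersion:
        # Mirrors _parse_version: floats come from INI files of 9.0+ studies.
        if isinstance(raw, float):
            raw = str(raw)
        return StudyVersion.parse(raw)

    print(parse_raw_version(880))    # the 8.8 study version
    print(parse_raw_version(9.0))    # the 9.0 study version
    print(parse_raw_version("8.6"))  # the 8.6 study version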
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/identifier.py b/antarest/study/storage/rawstudy/model/filesystem/config/identifier.py
index 2cb5d9ec64..ab428fca75 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/identifier.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/identifier.py
@@ -12,13 +12,15 @@
import typing as t
-from pydantic import BaseModel, Field, model_validator
+from pydantic import Field, model_validator
__all__ = ("IgnoreCaseIdentifier", "LowerCaseIdentifier")
+from antarest.core.serialization import AntaresBaseModel
+
class IgnoreCaseIdentifier(
- BaseModel,
+ AntaresBaseModel,
extra="forbid",
validate_assignment=True,
populate_by_name=True,
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/model.py b/antarest/study/storage/rawstudy/model/filesystem/config/model.py
index d0abd57710..f707ab4027 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/model.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/model.py
@@ -14,8 +14,10 @@
import typing as t
from pathlib import Path
-from pydantic import BaseModel, Field, model_validator
+from antares.study.version import StudyVersion
+from pydantic import Field, field_serializer, field_validator, model_validator
+from antarest.core.serialization import AntaresBaseModel
from antarest.core.utils.utils import DTO
from antarest.study.business.enum_ignore_case import EnumIgnoreCase
@@ -49,7 +51,7 @@ def __str__(self) -> str:
return self.value
-class Link(BaseModel, extra="ignore"):
+class Link(AntaresBaseModel, extra="ignore"):
"""
Object linked to /input/links//properties.ini information
@@ -74,7 +76,7 @@ def validation(cls, values: t.MutableMapping[str, t.Any]) -> t.MutableMapping[st
return values
-class Area(BaseModel, extra="forbid"):
+class Area(AntaresBaseModel, extra="forbid"):
"""
Object linked to /input//optimization.ini information
"""
@@ -89,7 +91,7 @@ class Area(BaseModel, extra="forbid"):
st_storages: t.List[STStorageConfigType] = []
-class DistrictSet(BaseModel):
+class DistrictSet(AntaresBaseModel):
"""
Object linked to /inputs/sets.ini information
"""
@@ -108,7 +110,7 @@ def get_areas(self, all_areas: t.List[str]) -> t.List[str]:
return self.areas or []
-class Simulation(BaseModel):
+class Simulation(AntaresBaseModel):
"""
Object linked to /output//about-the-study/** information
"""
@@ -130,7 +132,7 @@ def get_file(self) -> str:
return f"{self.date}{modes[self.mode]}{dash}{self.name}"
-class BindingConstraintDTO(BaseModel):
+class BindingConstraintDTO(AntaresBaseModel):
"""
Object linked to `input/bindingconstraints/bindingconstraints.ini` information
@@ -162,7 +164,7 @@ def __init__(
study_path: Path,
path: Path,
study_id: str,
- version: int,
+ version: StudyVersion,
output_path: t.Optional[Path] = None,
areas: t.Optional[t.Dict[str, Area]] = None,
sets: t.Optional[t.Dict[str, DistrictSet]] = None,
@@ -302,11 +304,11 @@ def transform_name_to_id(name: str, lower: bool = True) -> str:
return valid_id.lower() if lower else valid_id
-class FileStudyTreeConfigDTO(BaseModel):
+class FileStudyTreeConfigDTO(AntaresBaseModel):
study_path: Path
path: Path
study_id: str
- version: int
+ version: StudyVersion
output_path: t.Optional[Path] = None
areas: t.Dict[str, Area] = dict()
sets: t.Dict[str, DistrictSet] = dict()
@@ -317,6 +319,14 @@ class FileStudyTreeConfigDTO(BaseModel):
enr_modelling: str = str(EnrModelling.AGGREGATED)
zip_path: t.Optional[Path] = None
+ @field_serializer("version")
+ def serialize_version(self, version: StudyVersion) -> int:
+        return int(version)
+
+ @field_validator("version", mode="before")
+ def _validate_version(cls, v: t.Any) -> StudyVersion:
+ return StudyVersion.parse(v)
+
@staticmethod
def from_build_config(
config: FileStudyTreeConfig,
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py b/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py
index b1acfe4492..e796ed1beb 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py
@@ -12,9 +12,11 @@
import typing as t
+from antares.study.version import StudyVersion
from pydantic import Field
from antarest.study.business.enum_ignore_case import EnumIgnoreCase
+from antarest.study.model import STUDY_VERSION_8_1
from antarest.study.storage.rawstudy.model.filesystem.config.cluster import ClusterProperties
from antarest.study.storage.rawstudy.model.filesystem.config.identifier import IgnoreCaseIdentifier
@@ -110,7 +112,7 @@ class RenewableConfig(RenewableProperties, IgnoreCaseIdentifier):
RenewableConfigType = RenewableConfig
-def get_renewable_config_cls(study_version: t.Union[str, int]) -> t.Type[RenewableConfig]:
+def get_renewable_config_cls(study_version: StudyVersion) -> t.Type[RenewableConfig]:
"""
Retrieves the renewable configuration class based on the study version.
@@ -120,13 +122,12 @@ def get_renewable_config_cls(study_version: t.Union[str, int]) -> t.Type[Renewab
Returns:
The renewable configuration class.
"""
- version = int(study_version)
- if version >= 810:
+ if study_version >= STUDY_VERSION_8_1:
return RenewableConfig
raise ValueError(f"Unsupported study version {study_version}, required 810 or above.")
-def create_renewable_config(study_version: t.Union[str, int], **kwargs: t.Any) -> RenewableConfigType:
+def create_renewable_config(study_version: StudyVersion, **kwargs: t.Any) -> RenewableConfigType:
"""
Factory method to create a renewable configuration model.
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py b/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py
index 792855b141..426e263baa 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py
@@ -12,9 +12,11 @@
import typing as t
+from antares.study.version import StudyVersion
from pydantic import Field
from antarest.study.business.enum_ignore_case import EnumIgnoreCase
+from antarest.study.model import STUDY_VERSION_8_6, STUDY_VERSION_8_8
from antarest.study.storage.rawstudy.model.filesystem.config.cluster import ItemProperties
from antarest.study.storage.rawstudy.model.filesystem.config.identifier import LowerCaseIdentifier
@@ -161,7 +163,7 @@ class STStorage880Config(STStorage880Properties, LowerCaseIdentifier):
STStorageConfigType = t.Union[STStorageConfig, STStorage880Config]
-def get_st_storage_config_cls(study_version: t.Union[str, int]) -> t.Type[STStorageConfigType]:
+def get_st_storage_config_cls(study_version: StudyVersion) -> t.Type[STStorageConfigType]:
"""
Retrieves the short-term storage configuration class based on the study version.
@@ -171,15 +173,14 @@ def get_st_storage_config_cls(study_version: t.Union[str, int]) -> t.Type[STStor
Returns:
The short-term storage configuration class.
"""
- version = int(study_version)
- if version >= 880:
+ if study_version >= STUDY_VERSION_8_8:
return STStorage880Config
- elif version >= 860:
+ elif study_version >= STUDY_VERSION_8_6:
return STStorageConfig
- raise ValueError(f"Unsupported study version: {version}")
+ raise ValueError(f"Unsupported study version: {study_version}")
-def create_st_storage_config(study_version: t.Union[str, int], **kwargs: t.Any) -> STStorageConfigType:
+def create_st_storage_config(study_version: StudyVersion, **kwargs: t.Any) -> STStorageConfigType:
"""
Factory method to create a short-term storage configuration model.
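After this change, callers must hand get_st_storage_config_cls a StudyVersion instead of a string or int. A quick interactive sketch, assuming the antarest package from this branch is importable:

    from antares.study.version import StudyVersion

    from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import get_st_storage_config_cls

    print(get_st_storage_config_cls(StudyVersion.parse("8.8")).__name__)  # STStorage880Config
    print(get_st_storage_config_cls(StudyVersion.parse("8.6")).__name__)  # STStorageConfig
    # Anything below 8.6 still raises ValueError("Unsupported study version: ..."), as before.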
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/thermal.py b/antarest/study/storage/rawstudy/model/filesystem/config/thermal.py
index f3839566fd..ff095c2b2e 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/thermal.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/thermal.py
@@ -12,6 +12,7 @@
import typing as t
+from antares.study.version import StudyVersion
from pydantic import Field
from antarest.study.business.enum_ignore_case import EnumIgnoreCase
+from antarest.study.model import STUDY_VERSION_8_6, STUDY_VERSION_8_7
@@ -405,7 +406,7 @@ class Thermal870Config(Thermal870Properties, IgnoreCaseIdentifier):
ThermalConfigType = t.Union[Thermal870Config, Thermal860Config, ThermalConfig]
-def get_thermal_config_cls(study_version: t.Union[str, int]) -> t.Type[ThermalConfigType]:
+def get_thermal_config_cls(study_version: StudyVersion) -> t.Type[ThermalConfigType]:
"""
Retrieves the thermal configuration class based on the study version.
@@ -415,16 +416,15 @@ def get_thermal_config_cls(study_version: t.Union[str, int]) -> t.Type[ThermalCo
Returns:
The thermal configuration class.
"""
- version = int(study_version)
- if version >= 870:
+    if study_version >= STUDY_VERSION_8_7:
return Thermal870Config
- elif version == 860:
+    elif study_version == STUDY_VERSION_8_6:
return Thermal860Config
else:
return ThermalConfig
-def create_thermal_config(study_version: t.Union[str, int], **kwargs: t.Any) -> ThermalConfigType:
+def create_thermal_config(study_version: StudyVersion, **kwargs: t.Any) -> ThermalConfigType:
"""
Factory method to create a thermal configuration model.
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/areas/item/item.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/areas/item/item.py
index 37ee1d8381..5ea6ad808a 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/input/areas/item/item.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/areas/item/item.py
@@ -10,6 +10,7 @@
#
# This file is part of the Antares project.
+from antarest.study.model import STUDY_VERSION_8_3
from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode
from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
from antarest.study.storage.rawstudy.model.filesystem.root.input.areas.item.adequacy_patch import (
@@ -28,7 +29,7 @@ def build(self) -> TREE:
self.config.next_file("optimization.ini"),
),
}
- if self.config.version >= 830:
+ if self.config.version >= STUDY_VERSION_8_3:
children["adequacy_patch"] = InputAreasAdequacyPatch(
self.context, self.config.next_file("adequacy_patch.ini")
)
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/common/capacity/capacity.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/common/capacity/capacity.py
index 7627faa777..56eeceadd2 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/common/capacity/capacity.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/common/capacity/capacity.py
@@ -12,6 +12,9 @@
from typing import List, TypedDict
+from antares.study.version import StudyVersion
+
+from antarest.study.model import STUDY_VERSION_6_5
from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode
from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix
@@ -21,35 +24,36 @@
class MatrixInfo(TypedDict, total=False):
name: str
freq: MatrixFrequency
- start_version: int
+ start_version: StudyVersion
+INITIAL_VERSION = StudyVersion.parse(0)
# noinspection SpellCheckingInspection
MATRICES_INFO: List[MatrixInfo] = [
{
"name": "maxpower",
"freq": MatrixFrequency.DAILY,
- "start_version": 0,
+ "start_version": INITIAL_VERSION,
},
{
"name": "reservoir",
"freq": MatrixFrequency.DAILY,
- "start_version": 0,
+ "start_version": INITIAL_VERSION,
},
{
"name": "inflowPattern",
"freq": MatrixFrequency.DAILY,
- "start_version": 650,
+ "start_version": STUDY_VERSION_6_5,
},
{
"name": "creditmodulations",
"freq": MatrixFrequency.HOURLY,
- "start_version": 650,
+ "start_version": STUDY_VERSION_6_5,
},
{
"name": "waterValues",
"freq": MatrixFrequency.DAILY,
- "start_version": 650,
+ "start_version": STUDY_VERSION_6_5,
},
]
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py
index b38bafe736..d211875726 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py
@@ -10,6 +10,9 @@
#
# This file is part of the Antares project.
+from antares.study.version import StudyVersion
+
+from antarest.study.model import STUDY_VERSION_6_5
from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer
from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode
@@ -40,7 +43,7 @@ def __init__(self, context: ContextServer, config: FileStudyTreeConfig):
"use leeway", # bool
"power to level", # bool
]
- if config.version >= 650:
+ if config.version >= STUDY_VERSION_6_5:
sections += [
"initialize reservoir date",
"leeway low",
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/series/area/area.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/series/area/area.py
index 9488a2ffd9..792fb9335f 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/series/area/area.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/series/area/area.py
@@ -12,6 +12,7 @@
from typing import Any, Dict
+from antarest.study.model import STUDY_VERSION_6_5, STUDY_VERSION_8_6
from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode
from antarest.study.storage.rawstudy.model.filesystem.inode import TREE, INode
from antarest.study.storage.rawstudy.model.filesystem.matrix.constants import (
@@ -25,8 +26,9 @@
class InputHydroSeriesArea(FolderNode):
def build(self) -> TREE:
- freq = MatrixFrequency.DAILY if self.config.version >= 650 else MatrixFrequency.MONTHLY
- default_empty = default_scenario_daily if self.config.version >= 650 else default_scenario_monthly
+ study_version = self.config.version
+ freq = MatrixFrequency.DAILY if study_version >= STUDY_VERSION_6_5 else MatrixFrequency.MONTHLY
+ default_empty = default_scenario_daily if study_version >= STUDY_VERSION_6_5 else default_scenario_monthly
hydro_series_matrices: Dict[str, INode[Any, Any, Any]] = {
"mod": InputSeriesMatrix(
self.context,
@@ -42,7 +44,7 @@ def build(self) -> TREE:
default_empty=default_scenario_hourly,
),
}
- if self.config.version >= 860:
+ if study_version >= STUDY_VERSION_8_6:
hydro_series_matrices["mingen"] = InputSeriesMatrix(
self.context,
self.config.next_file("mingen.txt"),
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py
index cb4c5e01b6..1c19358baf 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py
@@ -10,6 +10,7 @@
#
# This file is part of the Antares project.
+from antarest.study.model import STUDY_VERSION_8_1, STUDY_VERSION_8_6
from antarest.study.storage.rawstudy.model.filesystem.config.model import EnrModelling
from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode
from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
@@ -49,11 +50,14 @@ def build(self) -> TREE:
"wind": InputPreproSeries(self.context, config.next_file("wind"), "wind_"),
}
- has_renewables = config.version >= 810 and EnrModelling(config.enr_modelling) == EnrModelling.CLUSTERS
+ study_version = config.version
+ has_renewables = (
+ study_version >= STUDY_VERSION_8_1 and EnrModelling(config.enr_modelling) == EnrModelling.CLUSTERS
+ )
if has_renewables:
children["renewables"] = ClusteredRenewables(self.context, config.next_file("renewables"))
- if config.version >= 860:
+ if study_version >= STUDY_VERSION_8_6:
children["st-storage"] = InputSTStorage(self.context, config.next_file("st-storage"))
return children
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/link/area/area.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/link/area/area.py
index b683390565..a67fbf6dfc 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/input/link/area/area.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/link/area/area.py
@@ -10,6 +10,7 @@
#
# This file is part of the Antares project.
+from antarest.study.model import STUDY_VERSION_8_2
from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer
from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode
@@ -35,7 +36,7 @@ def build(self) -> TREE:
children: TREE
ctx = self.context
cfg = self.config
- if cfg.version < 820:
+ if cfg.version < STUDY_VERSION_8_2:
children = {link: InputSeriesMatrix(ctx, cfg.next_file(f"{link}.txt")) for link in cfg.get_links(self.area)}
else:
children = {
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/link/area/properties.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/link/area/properties.py
index a5ceb35fad..74992f5dd0 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/input/link/area/properties.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/link/area/properties.py
@@ -10,6 +10,7 @@
#
# This file is part of the Antares project.
+from antarest.study.model import STUDY_VERSION_6_5
from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer
from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode
@@ -30,7 +31,7 @@ def __init__(
"filter-year-by-year": str,
}
- if config.version >= 650:
+ if config.version >= STUDY_VERSION_6_5:
section["loop-flow"] = bool
section["use-phase-shifter"] = bool
section["asset-type"] = str
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/settings/generaldata.py b/antarest/study/storage/rawstudy/model/filesystem/root/settings/generaldata.py
index 5066bbbabd..27ece887bf 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/settings/generaldata.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/settings/generaldata.py
@@ -13,6 +13,18 @@
from copy import deepcopy
from typing import Any, Dict
+from antarest.study.model import (
+ STUDY_VERSION_6_5,
+ STUDY_VERSION_7_0,
+ STUDY_VERSION_7_1,
+ STUDY_VERSION_7_2,
+ STUDY_VERSION_8,
+ STUDY_VERSION_8_1,
+ STUDY_VERSION_8_3,
+ STUDY_VERSION_8_4,
+ STUDY_VERSION_8_5,
+ STUDY_VERSION_8_6,
+)
from antarest.study.storage.rawstudy.ini_reader import IniReader
from antarest.study.storage.rawstudy.ini_writer import IniWriter
from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
@@ -113,25 +125,26 @@ def __init__(self, context: ContextServer, config: FileStudyTreeConfig):
general = types["general"]
optimization = types["optimization"]
other_preferences = types["other preferences"]
- if config.version >= 650:
+ study_version = config.version
+ if study_version >= STUDY_VERSION_6_5:
other_preferences["initial-reservoir-levels"] = str
- if config.version >= 700:
+ if study_version >= STUDY_VERSION_7_0:
optimization["link-type"] = str
- if config.version >= 710:
+ if study_version >= STUDY_VERSION_7_1:
general["thematic-trimming"] = bool
general["geographic-trimming"] = bool
del general["filtering"]
- if config.version >= 720:
+ if study_version >= STUDY_VERSION_7_2:
other_preferences["hydro-pricing-mode"] = str
- if config.version >= 800:
+ if study_version >= STUDY_VERSION_8:
other_preferences["hydro-heuristic-policy"] = str
optimization["include-exportstructure"] = bool
optimization["include-unfeasible-problem-behavior"] = str
general["custom-scenario"] = bool
del general["custom-ts-numbers"]
- if config.version >= 810:
+ if study_version >= STUDY_VERSION_8_1:
other_preferences["renewable-generation-modelling"] = str
- if config.version >= 830:
+ if study_version >= STUDY_VERSION_8_3:
types["adequacy patch"] = {
"include-adq-patch": bool,
"set-to-null-ntc-from-physical-out-to-physical-in-for-first-step": bool,
@@ -140,9 +153,9 @@ def __init__(self, context: ContextServer, config: FileStudyTreeConfig):
optimization["include-split-exported-mps"] = bool
# include-exportmps: none, optim-1, optim-2, both-optims
optimization["include-exportmps"] = str
- if config.version >= 840:
+ if study_version >= STUDY_VERSION_8_4:
del optimization["include-split-exported-mps"]
- if config.version >= 850:
+ if study_version >= STUDY_VERSION_8_5:
adequacy = types["adequacy patch"]
adequacy["price-taking-order"] = str
adequacy["include-hurdle-cost-csr"] = bool
@@ -151,7 +164,7 @@ def __init__(self, context: ContextServer, config: FileStudyTreeConfig):
adequacy["threshold-display-local-matching-rule-violations"] = float
adequacy["threshold-csr-variable-bounds-relaxation"] = int
- if config.version >= 860:
+ if study_version >= STUDY_VERSION_8_6:
types["adequacy patch"]["enable-first-step "] = bool
IniFileNode.__init__(
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/settings/scenariobuilder.py b/antarest/study/storage/rawstudy/model/filesystem/root/settings/scenariobuilder.py
index f80f99a942..bf0501d531 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/settings/scenariobuilder.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/settings/scenariobuilder.py
@@ -15,6 +15,13 @@
import typing_extensions as te
+from antarest.study.model import (
+ STUDY_VERSION_8,
+ STUDY_VERSION_8_1,
+ STUDY_VERSION_8_7,
+ STUDY_VERSION_9_1,
+ STUDY_VERSION_9_2,
+)
from antarest.study.storage.rawstudy.model.filesystem.config.model import EnrModelling, FileStudyTreeConfig
from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer
from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode
@@ -64,15 +71,15 @@ def __init__(self, context: ContextServer, config: FileStudyTreeConfig):
# Rules are defined for a specific version of the study.
study_version = config.version
- if study_version >= 810 and EnrModelling(self.config.enr_modelling) == EnrModelling.CLUSTERS:
+ if study_version >= STUDY_VERSION_8_1 and EnrModelling(self.config.enr_modelling) == EnrModelling.CLUSTERS:
self._populate_renewable_rules(rules)
- if study_version >= 870:
+ if study_version >= STUDY_VERSION_8_7:
self._populate_binding_constraints_rules(rules)
- if study_version >= 800:
+ if study_version >= STUDY_VERSION_8:
self._populate_hydro_initial_level_rules(rules)
- if study_version >= 920:
+ if study_version >= STUDY_VERSION_9_2:
self._populate_hydro_final_level_rules(rules)
- if study_version >= 910:
+ if study_version >= STUDY_VERSION_9_1:
self._populate_hydro_generation_power_rules(rules)
super().__init__(
diff --git a/antarest/study/storage/rawstudy/raw_study_service.py b/antarest/study/storage/rawstudy/raw_study_service.py
index 5e9295107a..1dca10323f 100644
--- a/antarest/study/storage/rawstudy/raw_study_service.py
+++ b/antarest/study/storage/rawstudy/raw_study_service.py
@@ -20,6 +20,8 @@
from uuid import uuid4
from zipfile import ZipFile
+from antares.study.version import StudyVersion
+
from antarest.core.config import Config
from antarest.core.exceptions import StudyDeletionNotAllowed
from antarest.core.interfaces.cache import ICache
@@ -202,7 +204,7 @@ def create(self, metadata: RawStudy) -> RawStudy:
path_study.mkdir()
create_new_empty_study(
- version=metadata.version,
+ version=StudyVersion.parse(metadata.version),
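+ # RawStudy.version is stored as a plain string in the database, hence the explicit parse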
path_study=path_study,
path_resources=self.path_resources,
)
@@ -442,7 +444,7 @@ def check_and_update_study_version_in_database(self, study: RawStudy) -> None:
study_path=study_path,
path=study_path,
study_id="",
- version=-1,
+ version=StudyVersion.parse(0),
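+ # version 0 is only a placeholder here; the real version is read just below from the study.antares file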
)
raw_study = self.study_factory.create_from_config(config)
file_metadata = raw_study.get(url=["study", "antares"])
diff --git a/antarest/study/storage/study_download_utils.py b/antarest/study/storage/study_download_utils.py
index 9c5f6beed3..f46b23e254 100644
--- a/antarest/study/storage/study_download_utils.py
+++ b/antarest/study/storage/study_download_utils.py
@@ -27,6 +27,7 @@
from antarest.core.exceptions import ChildNotFoundError
from antarest.core.serialization import to_json
from antarest.study.model import (
+ STUDY_VERSION_8_1,
ExportFormat,
MatrixAggregationResult,
MatrixAggregationResultDTO,
@@ -115,7 +116,9 @@ def level_output_filter(
cluster_details = [f"details-{data.level.value}"]
config = study.config
- has_renewables = config.version >= 810 and EnrModelling(config.enr_modelling) == EnrModelling.CLUSTERS
+ has_renewables = (
+ config.version >= STUDY_VERSION_8_1 and EnrModelling(config.enr_modelling) == EnrModelling.CLUSTERS
+ )
if has_renewables:
cluster_details += [f"details-res-{data.level.value}"]
diff --git a/antarest/study/storage/utils.py b/antarest/study/storage/utils.py
index 5d7a6c2db0..9eb9a3d682 100644
--- a/antarest/study/storage/utils.py
+++ b/antarest/study/storage/utils.py
@@ -23,6 +23,8 @@
from uuid import uuid4
from zipfile import ZipFile
+from antares.study.version import StudyVersion
+
from antarest.core.exceptions import StudyValidationError, UnsupportedStudyVersion
from antarest.core.interfaces.cache import CacheConstants, ICache
from antarest.core.jwt import JWTUser
@@ -161,7 +163,7 @@ def remove_from_cache(cache: ICache, root_id: str) -> None:
)
-def create_new_empty_study(version: str, path_study: Path, path_resources: Path) -> None:
+def create_new_empty_study(version: StudyVersion, path_study: Path, path_resources: Path) -> None:
version_template: t.Optional[str] = STUDY_REFERENCE_TEMPLATES.get(version, None)
if version_template is None:
msg = f"{version} is not a supported version, supported versions are: {list(STUDY_REFERENCE_TEMPLATES.keys())}"
diff --git a/antarest/study/storage/variantstudy/business/command_extractor.py b/antarest/study/storage/variantstudy/business/command_extractor.py
index adbcc811d8..e71027fcb9 100644
--- a/antarest/study/storage/variantstudy/business/command_extractor.py
+++ b/antarest/study/storage/variantstudy/business/command_extractor.py
@@ -20,6 +20,7 @@
from antarest.core.utils.utils import StopWatch
from antarest.matrixstore.model import MatrixData
from antarest.matrixstore.service import ISimpleMatrixService
+from antarest.study.model import STUDY_VERSION_6_5, STUDY_VERSION_8_2, STUDY_VERSION_8_7
from antarest.study.storage.patch_service import PatchService
from antarest.study.storage.rawstudy.model.filesystem.config.files import get_playlist
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
@@ -173,7 +174,7 @@ def extract_link(
)
null_matrix_id = strip_matrix_protocol(self.generator_matrix_constants.get_null_matrix())
commands: t.List[ICommand] = [link_command, link_config_command]
- if study.config.version < 820:
+ if study.config.version < STUDY_VERSION_8_2:
commands.append(
self.generate_replace_matrix(
study_tree,
@@ -288,7 +289,7 @@ def extract_hydro(self, study: FileStudy, area_id: str) -> t.List[ICommand]:
),
]
- if study_tree.config.version > 650:
+ if study_tree.config.version > STUDY_VERSION_6_5:
commands += [
self.generate_replace_matrix(
study_tree,
@@ -367,7 +368,7 @@ def extract_binding_constraint(
del binding[term_id]
# Extract the matrices associated with the binding constraint
- if study.config.version < 870:
+ if study.config.version < STUDY_VERSION_8_7:
urls = {"values": ["input", "bindingconstraints", bc_id]}
else:
urls = {
diff --git a/antarest/study/storage/variantstudy/business/matrix_constants_generator.py b/antarest/study/storage/variantstudy/business/matrix_constants_generator.py
index abb0953b4c..98f098a07d 100644
--- a/antarest/study/storage/variantstudy/business/matrix_constants_generator.py
+++ b/antarest/study/storage/variantstudy/business/matrix_constants_generator.py
@@ -14,9 +14,11 @@
from pathlib import Path
from typing import Dict
+from antares.study.version import StudyVersion
from filelock import FileLock
from antarest.matrixstore.service import ISimpleMatrixService
+from antarest.study.model import STUDY_VERSION_6_5, STUDY_VERSION_8_2
from antarest.study.storage.variantstudy.business import matrix_constants
from antarest.study.storage.variantstudy.business.matrix_constants.common import (
FIXED_4_COLUMNS,
@@ -125,14 +127,14 @@ def init_constant_matrices(
matrix_constants.st_storage.series.pmax_injection
)
- def get_hydro_max_power(self, version: int) -> str:
- if version > 650:
+ def get_hydro_max_power(self, version: StudyVersion) -> str:
+ if version > STUDY_VERSION_6_5:
return MATRIX_PROTOCOL_PREFIX + self.hashes[HYDRO_COMMON_CAPACITY_MAX_POWER_V7]
else:
return MATRIX_PROTOCOL_PREFIX + self.hashes[NULL_MATRIX_NAME]
- def get_hydro_reservoir(self, version: int) -> str:
- if version > 650:
+ def get_hydro_reservoir(self, version: StudyVersion) -> str:
+ if version > STUDY_VERSION_6_5:
return MATRIX_PROTOCOL_PREFIX + self.hashes[HYDRO_COMMON_CAPACITY_RESERVOIR_V7]
return MATRIX_PROTOCOL_PREFIX + self.hashes[HYDRO_COMMON_CAPACITY_RESERVOIR_V6]
@@ -154,8 +156,8 @@ def get_thermal_prepro_data(self) -> str:
def get_thermal_prepro_modulation(self) -> str:
return MATRIX_PROTOCOL_PREFIX + self.hashes[THERMAL_PREPRO_MODULATION]
- def get_link(self, version: int) -> str:
- if version < 820:
+ def get_link(self, version: StudyVersion) -> str:
+ if version < STUDY_VERSION_8_2:
return MATRIX_PROTOCOL_PREFIX + self.hashes[LINK_V7]
return MATRIX_PROTOCOL_PREFIX + self.hashes[LINK_V8]
diff --git a/antarest/study/storage/variantstudy/business/utils.py b/antarest/study/storage/variantstudy/business/utils.py
index 9f1988d1de..e352466be2 100644
--- a/antarest/study/storage/variantstudy/business/utils.py
+++ b/antarest/study/storage/variantstudy/business/utils.py
@@ -15,6 +15,7 @@
from antarest.core.model import JSON
from antarest.matrixstore.model import MatrixData
from antarest.matrixstore.service import ISimpleMatrixService
+from antarest.study.model import STUDY_VERSION_8_2
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
from antarest.study.storage.variantstudy.business.matrix_constants_generator import MATRIX_PROTOCOL_PREFIX
from antarest.study.storage.variantstudy.model.command.icommand import ICommand
@@ -87,7 +88,7 @@ def links_series(alias: str, study: FileStudy) -> str:
data = alias.split("/")
area_from = data[1]
area_to = data[2]
- if study.config.version < 820:
+ if study.config.version < STUDY_VERSION_8_2:
return f"input/links/{area_from}/{area_to}"
return f"input/links/{area_from}/{area_to}_parameters"
diff --git a/antarest/study/storage/variantstudy/model/command/create_area.py b/antarest/study/storage/variantstudy/model/command/create_area.py
index bfc8711b8e..b3f1a30e7f 100644
--- a/antarest/study/storage/variantstudy/model/command/create_area.py
+++ b/antarest/study/storage/variantstudy/model/command/create_area.py
@@ -15,6 +15,7 @@
from pydantic import Field
from antarest.core.model import JSON
+from antarest.study.model import STUDY_VERSION_6_5, STUDY_VERSION_8_1, STUDY_VERSION_8_3, STUDY_VERSION_8_6
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
Area,
EnrModelling,
@@ -247,7 +248,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
new_correlation.setdefault("annual", {})
new_area_data["input"]["hydro"]["prepro"]["correlation"] = new_correlation
- if version > 650:
+ if version > STUDY_VERSION_6_5:
hydro_config.setdefault("initialize reservoir date", {})[area_id] = 0
hydro_config.setdefault("leeway low", {})[area_id] = 1
hydro_config.setdefault("leeway up", {})[area_id] = 1
@@ -261,16 +262,16 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
] = self.command_context.generator_matrix_constants.get_hydro_inflow_pattern()
new_area_data["input"]["hydro"]["common"]["capacity"][f"waterValues_{area_id}"] = null_matrix
- has_renewables = config.version >= 810 and EnrModelling(config.enr_modelling) == EnrModelling.CLUSTERS
+ has_renewables = version >= STUDY_VERSION_8_1 and EnrModelling(config.enr_modelling) == EnrModelling.CLUSTERS
if has_renewables:
new_area_data["input"]["renewables"] = {"clusters": {area_id: {"list": {}}}}
- if version >= 830:
+ if version >= STUDY_VERSION_8_3:
new_area_data["input"]["areas"][area_id]["adequacy_patch"] = {
"adequacy-patch": {"adequacy-patch-mode": "outside"}
}
- if version >= 860:
+ if version >= STUDY_VERSION_8_6:
new_area_data["input"]["st-storage"] = {"clusters": {area_id: {"list": {}}}}
new_area_data["input"]["hydro"]["series"][area_id]["mingen"] = null_matrix
diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
index 50ce659fd9..a57f51a666 100644
--- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
+++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
@@ -15,10 +15,13 @@
from enum import Enum
import numpy as np
-from pydantic import BaseModel, Field, field_validator, model_validator
+from antares.study.version import StudyVersion
+from pydantic import Field, field_validator, model_validator
+from antarest.core.serialization import AntaresBaseModel
from antarest.matrixstore.model import MatrixData
from antarest.study.business.all_optional_meta import all_optional_model, camel_case_model
+from antarest.study.model import STUDY_VERSION_8_3, STUDY_VERSION_8_7
from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
DEFAULT_GROUP,
DEFAULT_OPERATOR,
@@ -53,7 +56,7 @@ class TermMatrices(Enum):
EQUAL = "equal_term_matrix"
-def check_matrix_values(time_step: BindingConstraintFrequency, values: MatrixType, version: int) -> None:
+def check_matrix_values(time_step: BindingConstraintFrequency, values: MatrixType, version: StudyVersion) -> None:
"""
Check the binding constraint's matrix values for the specified time step.
@@ -90,7 +93,7 @@ def check_matrix_values(time_step: BindingConstraintFrequency, values: MatrixTyp
# =================================================================================
-class BindingConstraintPropertiesBase(BaseModel, extra="forbid", populate_by_name=True):
+class BindingConstraintPropertiesBase(AntaresBaseModel, extra="forbid", populate_by_name=True):
enabled: bool = True
time_step: BindingConstraintFrequency = Field(DEFAULT_TIMESTEP, alias="type")
operator: BindingConstraintOperator = DEFAULT_OPERATOR
@@ -123,20 +126,19 @@ class BindingConstraintProperties870(BindingConstraintProperties830):
]
-def get_binding_constraint_config_cls(study_version: t.Union[str, int]) -> t.Type[BindingConstraintProperties]:
+def get_binding_constraint_config_cls(study_version: StudyVersion) -> t.Type[BindingConstraintProperties]:
"""
Retrieves the binding constraint configuration class based on the study version.
"""
- version = int(study_version)
- if version >= 870:
+ if study_version >= STUDY_VERSION_8_7:
return BindingConstraintProperties870
- elif version >= 830:
+ elif study_version >= STUDY_VERSION_8_3:
return BindingConstraintProperties830
else:
return BindingConstraintPropertiesBase
-def create_binding_constraint_config(study_version: t.Union[str, int], **kwargs: t.Any) -> BindingConstraintProperties:
+def create_binding_constraint_config(study_version: StudyVersion, **kwargs: t.Any) -> BindingConstraintProperties:
"""
Factory method to create a binding constraint configuration model.
@@ -163,7 +165,7 @@ class OptionalProperties(BindingConstraintProperties870):
@camel_case_model
-class BindingConstraintMatrices(BaseModel, extra="forbid", populate_by_name=True):
+class BindingConstraintMatrices(AntaresBaseModel, extra="forbid", populate_by_name=True):
"""
Class used to store the matrices of a binding constraint.
"""
@@ -253,7 +255,11 @@ def get_inner_matrices(self) -> t.List[str]:
]
def get_corresponding_matrices(
- self, v: t.Optional[t.Union[MatrixType, str]], time_step: BindingConstraintFrequency, version: int, create: bool
+ self,
+ v: t.Optional[t.Union[MatrixType, str]],
+ time_step: BindingConstraintFrequency,
+ version: StudyVersion,
+ create: bool,
) -> t.Optional[str]:
constants: GeneratorMatrixConstants = self.command_context.generator_matrix_constants
@@ -290,10 +296,10 @@ def validates_and_fills_matrices(
*,
time_step: BindingConstraintFrequency,
specific_matrices: t.Optional[t.List[str]],
- version: int,
+ version: StudyVersion,
create: bool,
) -> None:
- if version < 870:
+ if version < STUDY_VERSION_8_7:
self.values = self.get_corresponding_matrices(self.values, time_step, version, create)
elif specific_matrices:
for matrix in specific_matrices:
@@ -362,7 +368,7 @@ def apply_binding_constraint(
bd_id, study_data.config, self.coeffs or {}, operator=current_operator, time_step=time_step, group=group
)
- if version >= 870:
+ if version >= STUDY_VERSION_8_7:
# When all BC of a given group are removed, the group should be removed from the scenario builder
old_groups = old_groups or set()
new_groups = {bd.get("group", DEFAULT_GROUP).lower() for bd in binding_constraints.values()}
@@ -372,7 +378,7 @@ def apply_binding_constraint(
if self.values:
if not isinstance(self.values, str): # pragma: no cover
raise TypeError(repr(self.values))
- if version < 870:
+ if version < STUDY_VERSION_8_7:
study_data.tree.save(self.values, ["input", "bindingconstraints", bd_id])
operator_matrices_map = {
@@ -386,7 +392,7 @@ def apply_binding_constraint(
if matrix_term:
if not isinstance(matrix_term, str): # pragma: no cover
raise TypeError(repr(matrix_term))
- if version >= 870:
+ if version >= STUDY_VERSION_8_7:
matrix_id = f"{bd_id}_{matrix_alias}"
study_data.tree.save(matrix_term, ["input", "bindingconstraints", matrix_id])
return CommandOutput(status=True)
diff --git a/antarest/study/storage/variantstudy/model/command/create_cluster.py b/antarest/study/storage/variantstudy/model/command/create_cluster.py
index a1c6ef17aa..ace36bce74 100644
--- a/antarest/study/storage/variantstudy/model/command/create_cluster.py
+++ b/antarest/study/storage/variantstudy/model/command/create_cluster.py
@@ -17,6 +17,7 @@
from antarest.core.model import JSON
from antarest.core.utils.utils import assert_this
from antarest.matrixstore.model import MatrixData
+from antarest.study.model import STUDY_VERSION_8_7
from antarest.study.storage.rawstudy.model.filesystem.config.model import (
Area,
FileStudyTreeConfig,
@@ -149,7 +150,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
}
}
}
- if study_data.config.version >= 870:
+ if study_data.config.version >= STUDY_VERSION_8_7:
new_cluster_data["input"]["thermal"]["series"][self.area_id][series_id]["CO2Cost"] = null_matrix
new_cluster_data["input"]["thermal"]["series"][self.area_id][series_id]["fuelCost"] = null_matrix
study_data.tree.save(new_cluster_data)
diff --git a/antarest/study/storage/variantstudy/model/command/create_link.py b/antarest/study/storage/variantstudy/model/command/create_link.py
index ef2d21f0a9..eec1c4f7ea 100644
--- a/antarest/study/storage/variantstudy/model/command/create_link.py
+++ b/antarest/study/storage/variantstudy/model/command/create_link.py
@@ -17,6 +17,7 @@
from antarest.core.model import JSON
from antarest.core.utils.utils import assert_this
from antarest.matrixstore.model import MatrixData
+from antarest.study.model import STUDY_VERSION_8_2
from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig, Link
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
from antarest.study.storage.variantstudy.business.utils import strip_matrix_protocol, validate_matrix
@@ -221,7 +222,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
self.indirect = self.indirect or (self.command_context.generator_matrix_constants.get_link_indirect())
assert type(self.series) is str
- if version < 820:
+ if version < STUDY_VERSION_8_2:
study_data.tree.save(self.series, ["input", "links", area_from, area_to])
else:
study_data.tree.save(
diff --git a/antarest/study/storage/variantstudy/model/command/create_st_storage.py b/antarest/study/storage/variantstudy/model/command/create_st_storage.py
index 8244957da5..4bebf009c3 100644
--- a/antarest/study/storage/variantstudy/model/command/create_st_storage.py
+++ b/antarest/study/storage/variantstudy/model/command/create_st_storage.py
@@ -17,6 +17,7 @@
from antarest.core.model import JSON
from antarest.matrixstore.model import MatrixData
+from antarest.study.model import STUDY_VERSION_8_6
from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, FileStudyTreeConfig
from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import STStorageConfigType
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
@@ -36,7 +37,7 @@
)
# Minimum required version.
-REQUIRED_VERSION = 860
+REQUIRED_VERSION = STUDY_VERSION_8_6
MatrixType = t.List[t.List[MatrixData]]
diff --git a/antarest/study/storage/variantstudy/model/command/remove_area.py b/antarest/study/storage/variantstudy/model/command/remove_area.py
index 3bf3e1856c..a61290bab1 100644
--- a/antarest/study/storage/variantstudy/model/command/remove_area.py
+++ b/antarest/study/storage/variantstudy/model/command/remove_area.py
@@ -16,6 +16,13 @@
from antarest.core.exceptions import ChildNotFoundError
from antarest.core.model import JSON
+from antarest.study.model import (
+ STUDY_VERSION_6_5,
+ STUDY_VERSION_8_1,
+ STUDY_VERSION_8_2,
+ STUDY_VERSION_8_6,
+ STUDY_VERSION_8_7,
+)
from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
from antarest.study.storage.variantstudy.business.utils_binding_constraint import (
@@ -75,7 +82,7 @@ def _remove_area_from_links(self, study_data: FileStudy) -> None:
if link == self.id:
study_data.tree.delete(["input", "links", area_name, "properties", self.id])
try:
- if study_data.config.version < 820:
+ if study_data.config.version < STUDY_VERSION_8_2:
study_data.tree.delete(["input", "links", area_name, self.id])
else:
study_data.tree.delete(["input", "links", area_name, f"{self.id}_parameters"])
@@ -129,7 +136,7 @@ def _remove_area_from_binding_constraints(self, study_data: FileStudy) -> None:
bc_to_remove[bc_index] = binding_constraints.pop(bc_index)
break
- matrix_suffixes = ["_lt", "_gt", "_eq"] if study_data.config.version >= 870 else [""]
+ matrix_suffixes = ["_lt", "_gt", "_eq"] if study_data.config.version >= STUDY_VERSION_8_7 else [""]
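+ # studies >= 8.7 store one binding-constraint matrix per operator, hence the _lt/_gt/_eq suffixes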
for bc_index, bc in bc_to_remove.items():
for suffix in matrix_suffixes:
@@ -239,7 +246,8 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
study_data.tree.delete(["input", "wind", "series", f"wind_{self.id}"])
study_data.tree.delete(["input", "links", self.id])
- if study_data.config.version > 650:
+ study_version = study_data.config.version
+ if study_version > STUDY_VERSION_6_5:
study_data.tree.delete(["input", "hydro", "hydro", "initialize reservoir date", self.id])
study_data.tree.delete(["input", "hydro", "hydro", "leeway low", self.id])
study_data.tree.delete(["input", "hydro", "hydro", "leeway up", self.id])
@@ -248,13 +256,13 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
study_data.tree.delete(["input", "hydro", "common", "capacity", f"inflowPattern_{self.id}"])
study_data.tree.delete(["input", "hydro", "common", "capacity", f"waterValues_{self.id}"])
- if study_data.config.version >= 810:
+ if study_version >= STUDY_VERSION_8_1:
with contextlib.suppress(ChildNotFoundError):
# renewables folder only exist in tree if study.renewable-generation-modelling is "clusters"
study_data.tree.delete(["input", "renewables", "clusters", self.id])
study_data.tree.delete(["input", "renewables", "series", self.id])
- if study_data.config.version >= 860:
+ if study_version >= STUDY_VERSION_8_6:
study_data.tree.delete(["input", "st-storage", "clusters", self.id])
study_data.tree.delete(["input", "st-storage", "series", self.id])
@@ -267,13 +275,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
output, _ = self._apply_config(study_data.config)
- new_area_data: JSON = {
- "input": {
- "areas": {
- "list": [area.name for area in study_data.config.areas.values()],
- }
- }
- }
+ new_area_data: JSON = {"input": {"areas": {"list": [area.name for area in study_data.config.areas.values()]}}}
study_data.tree.save(new_area_data)
return output
diff --git a/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py
index ee51c7e641..d497576b22 100644
--- a/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py
+++ b/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py
@@ -13,6 +13,7 @@
from typing import Any, Dict, List, Tuple
from antarest.core.model import JSON
+from antarest.study.model import STUDY_VERSION_8_7
from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import DEFAULT_GROUP
from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
@@ -57,7 +58,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
new_binding_constraints,
["input", "bindingconstraints", "bindingconstraints"],
)
- if study_data.config.version < 870:
+ if study_data.config.version < STUDY_VERSION_8_7:
study_data.tree.delete(["input", "bindingconstraints", self.id])
else:
existing_files = study_data.tree.get(["input", "bindingconstraints"], depth=1)
diff --git a/antarest/study/storage/variantstudy/model/command/remove_link.py b/antarest/study/storage/variantstudy/model/command/remove_link.py
index a384ccff58..eeb7ad81f7 100644
--- a/antarest/study/storage/variantstudy/model/command/remove_link.py
+++ b/antarest/study/storage/variantstudy/model/command/remove_link.py
@@ -14,6 +14,7 @@
from pydantic import field_validator, model_validator
+from antarest.study.model import STUDY_VERSION_8_2
from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig, transform_name_to_id
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
from antarest.study.storage.variantstudy.model.command.common import CommandName, CommandOutput
@@ -137,7 +138,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
output = self._check_link_exists(study_data.config)[0]
if output.status:
- if study_data.config.version < 820:
+ if study_data.config.version < STUDY_VERSION_8_2:
study_data.tree.delete(["input", "links", self.area1, self.area2])
else:
study_data.tree.delete(["input", "links", self.area1, f"{self.area2}_parameters"])
diff --git a/antarest/study/storage/variantstudy/model/command/remove_st_storage.py b/antarest/study/storage/variantstudy/model/command/remove_st_storage.py
index 550587535d..9f60befabe 100644
--- a/antarest/study/storage/variantstudy/model/command/remove_st_storage.py
+++ b/antarest/study/storage/variantstudy/model/command/remove_st_storage.py
@@ -14,6 +14,7 @@
from pydantic import Field
+from antarest.study.model import STUDY_VERSION_8_6
from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, FileStudyTreeConfig
from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
from antarest.study.storage.variantstudy.model.command.common import CommandName, CommandOutput
@@ -21,7 +22,7 @@
from antarest.study.storage.variantstudy.model.model import CommandDTO
# minimum required version.
-REQUIRED_VERSION = 860
+REQUIRED_VERSION = STUDY_VERSION_8_6
class RemoveSTStorage(ICommand):
diff --git a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py
index 043111a312..f6974fb6f9 100644
--- a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py
+++ b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py
@@ -13,6 +13,7 @@
import typing as t
from antarest.core.model import JSON
+from antarest.study.model import STUDY_VERSION_8_7
from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
DEFAULT_GROUP,
OPERATOR_MATRICES_MAP,
@@ -164,8 +165,9 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
index, actual_cfg = index_and_cfg
+ study_version = study_data.config.version
# rename matrices if the operator has changed for version >= 870
- if self.operator and study_data.config.version >= 870:
+ if self.operator and study_version >= STUDY_VERSION_8_7:
existing_operator = BindingConstraintOperator(actual_cfg["operator"])
new_operator = self.operator
_update_matrices_names(study_data, self.id, existing_operator, new_operator)
@@ -175,13 +177,12 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
updated_matrices = [
term for term in [m.value for m in TermMatrices] if hasattr(self, term) and getattr(self, term)
]
- study_version = study_data.config.version
+
time_step = self.time_step or BindingConstraintFrequency(actual_cfg["type"])
self.validates_and_fills_matrices(
time_step=time_step, specific_matrices=updated_matrices or None, version=study_version, create=False
)
- study_version = study_data.config.version
props = create_binding_constraint_config(study_version, **self.model_dump())
obj = props.model_dump(mode="json", by_alias=True, exclude_unset=True)
diff --git a/antarest/study/storage/variantstudy/model/command_context.py b/antarest/study/storage/variantstudy/model/command_context.py
index 5996e63528..f4b9d6f6c7 100644
--- a/antarest/study/storage/variantstudy/model/command_context.py
+++ b/antarest/study/storage/variantstudy/model/command_context.py
@@ -10,14 +10,13 @@
#
# This file is part of the Antares project.
-from pydantic import BaseModel
-
+from antarest.core.serialization import AntaresBaseModel
from antarest.matrixstore.service import ISimpleMatrixService
from antarest.study.storage.patch_service import PatchService
from antarest.study.storage.variantstudy.business.matrix_constants_generator import GeneratorMatrixConstants
-class CommandContext(BaseModel):
+class CommandContext(AntaresBaseModel):
generator_matrix_constants: GeneratorMatrixConstants
matrix_service: ISimpleMatrixService
patch_service: PatchService
diff --git a/antarest/study/storage/variantstudy/model/model.py b/antarest/study/storage/variantstudy/model/model.py
index 0be3c75353..3814b2519c 100644
--- a/antarest/study/storage/variantstudy/model/model.py
+++ b/antarest/study/storage/variantstudy/model/model.py
@@ -14,9 +14,9 @@
import uuid
import typing_extensions as te
-from pydantic import BaseModel
from antarest.core.model import JSON
+from antarest.core.serialization import AntaresBaseModel
from antarest.study.model import StudyMetadataDTO
LegacyDetailsDTO = t.Tuple[str, bool, str]
@@ -45,7 +45,7 @@ class NewDetailsDTO(te.TypedDict):
DetailsDTO = t.Union[LegacyDetailsDTO, NewDetailsDTO]
-class GenerationResultInfoDTO(BaseModel):
+class GenerationResultInfoDTO(AntaresBaseModel):
"""
Result information of a snapshot generation process.
@@ -58,7 +58,7 @@ class GenerationResultInfoDTO(BaseModel):
details: t.MutableSequence[DetailsDTO]
-class CommandDTO(BaseModel):
+class CommandDTO(AntaresBaseModel):
"""
This class represents a command.
@@ -75,7 +75,7 @@ class CommandDTO(BaseModel):
version: int = 1
-class CommandResultDTO(BaseModel):
+class CommandResultDTO(AntaresBaseModel):
"""
This class represents the result of a command.
diff --git a/antarest/tools/cli.py b/antarest/tools/cli.py
index 85eef22128..f7735b891e 100644
--- a/antarest/tools/cli.py
+++ b/antarest/tools/cli.py
@@ -15,7 +15,7 @@
from typing import Optional
import click
-from httpx import Client
+from antares.study.version import StudyVersion
from antarest.study.model import NEW_DEFAULT_STUDY_VERSION
from antarest.study.storage.study_upgrader import StudyUpgrader
@@ -81,7 +81,7 @@ def commands() -> None:
required=False,
type=str,
help=f"Study version. Default:{NEW_DEFAULT_STUDY_VERSION}",
- default=NEW_DEFAULT_STUDY_VERSION,
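+ # ":ddd" renders the version as a three-digit string (e.g. 8.6 -> "860"), keeping the click default a str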
+ default=f"{NEW_DEFAULT_STUDY_VERSION:ddd}",
)
def cli_apply_script(
input: str,
@@ -102,11 +102,11 @@ def cli_apply_script(
if host is not None and study_id is None:
print("--study_id must be set")
exit(1)
-
+ study_version = StudyVersion.parse(version)
client = None
if host:
client = create_http_client(verify=not no_verify, auth_token=auth_token)
- res = generate_study(Path(input), study_id, output, host, client, version)
+ res = generate_study(Path(input), study_id, output, host, client, study_version)
print(res)
@@ -162,11 +162,11 @@ def cli_generate_script(input: str, output: str) -> None:
required=False,
type=str,
help=f"Study version. Default:{NEW_DEFAULT_STUDY_VERSION}",
- default=NEW_DEFAULT_STUDY_VERSION,
+ default=f"{NEW_DEFAULT_STUDY_VERSION:ddd}",
)
def cli_generate_script_diff(base: str, variant: str, output: str, version: str) -> None:
"""Generate variant script commands from two variant script directories"""
- generate_diff(Path(base), Path(variant), Path(output), version)
+ generate_diff(Path(base), Path(variant), Path(output), StudyVersion.parse(version))
@commands.command("upgrade-study")
diff --git a/antarest/tools/lib.py b/antarest/tools/lib.py
index 5f3aafc74a..da6d4db43b 100644
--- a/antarest/tools/lib.py
+++ b/antarest/tools/lib.py
@@ -20,6 +20,7 @@
from zipfile import ZipFile
import numpy as np
+from antares.study.version import StudyVersion
from httpx import Client
from antarest.core.cache.business.local_chache import LocalCache
@@ -129,7 +130,7 @@ class LocalVariantGenerator(IVariantGenerator):
def __init__(self, output_path: Path):
self.output_path = output_path
- def render_template(self, study_version: str = NEW_DEFAULT_STUDY_VERSION) -> None:
+ def render_template(self, study_version: StudyVersion = NEW_DEFAULT_STUDY_VERSION) -> None:
version_template = STUDY_REFERENCE_TEMPLATES[study_version]
empty_study_zip = get_local_path() / "resources" / version_template
with ZipFile(empty_study_zip) as zip_output:
@@ -216,7 +217,7 @@ def generate_diff(
base: Path,
variant: Path,
output_dir: Path,
- study_version: str = NEW_DEFAULT_STUDY_VERSION,
+ study_version: StudyVersion = NEW_DEFAULT_STUDY_VERSION,
) -> None:
"""
Generate variant script commands from two variant script directories.
@@ -328,7 +329,7 @@ def generate_study(
output: Optional[str] = None,
host: Optional[str] = None,
session: Optional[Client] = None,
- study_version: str = NEW_DEFAULT_STUDY_VERSION,
+ study_version: StudyVersion = NEW_DEFAULT_STUDY_VERSION,
) -> GenerationResultInfoDTO:
"""
Generate a new study or update an existing study by applying commands.
diff --git a/antarest/worker/archive_worker.py b/antarest/worker/archive_worker.py
index 4fbc6a0631..a488d42d0d 100644
--- a/antarest/worker/archive_worker.py
+++ b/antarest/worker/archive_worker.py
@@ -13,10 +13,9 @@
import logging
from pathlib import Path
-from pydantic import BaseModel
-
from antarest.core.config import Config
from antarest.core.interfaces.eventbus import IEventBus
+from antarest.core.serialization import AntaresBaseModel
from antarest.core.tasks.model import TaskResult
from antarest.core.utils.utils import StopWatch, unzip
from antarest.worker.worker import AbstractWorker, WorkerTaskCommand
@@ -24,7 +23,7 @@
logger = logging.getLogger(__name__)
-class ArchiveTaskArgs(BaseModel):
+class ArchiveTaskArgs(AntaresBaseModel):
src: str
dest: str
remove_src: bool = False
diff --git a/antarest/worker/simulator_worker.py b/antarest/worker/simulator_worker.py
index 583f340e7a..5dba1d13db 100644
--- a/antarest/worker/simulator_worker.py
+++ b/antarest/worker/simulator_worker.py
@@ -18,11 +18,10 @@
from pathlib import Path
from typing import cast
-from pydantic import BaseModel
-
from antarest.core.cache.business.local_chache import LocalCache
from antarest.core.config import Config, LocalConfig
from antarest.core.interfaces.eventbus import IEventBus
+from antarest.core.serialization import AntaresBaseModel
from antarest.core.tasks.model import TaskResult
from antarest.core.utils.fastapi_sqlalchemy import db
from antarest.launcher.adapters.log_manager import follow
@@ -38,7 +37,7 @@
GENERATE_KIRSHOFF_CONSTRAINTS_TASK_NAME = "generate-kirshoff-constraints"
-class GenerateTimeseriesTaskArgs(BaseModel):
+class GenerateTimeseriesTaskArgs(AntaresBaseModel):
study_id: str
study_path: str
managed: bool
diff --git a/antarest/worker/worker.py b/antarest/worker/worker.py
index 7dc2534764..f73dd551f0 100644
--- a/antarest/worker/worker.py
+++ b/antarest/worker/worker.py
@@ -16,11 +16,10 @@
from concurrent.futures import Future, ThreadPoolExecutor
from typing import Any, Dict, List, Union
-from pydantic import BaseModel
-
from antarest.core.interfaces.eventbus import Event, EventType, IEventBus
from antarest.core.interfaces.service import IService
from antarest.core.model import PermissionInfo, PublicMode
+from antarest.core.serialization import AntaresBaseModel
from antarest.core.tasks.model import TaskResult
logger = logging.getLogger(__name__)
@@ -28,12 +27,12 @@
MAX_WORKERS = 10
-class WorkerTaskResult(BaseModel):
+class WorkerTaskResult(AntaresBaseModel):
task_id: str
task_result: TaskResult
-class WorkerTaskCommand(BaseModel):
+class WorkerTaskCommand(AntaresBaseModel):
task_id: str
task_type: str
task_args: Dict[str, Union[int, float, bool, str]]
diff --git a/tests/cache/test_local_cache.py b/tests/cache/test_local_cache.py
index b779a2df08..dc6382cac5 100644
--- a/tests/cache/test_local_cache.py
+++ b/tests/cache/test_local_cache.py
@@ -14,6 +14,8 @@
from pathlib import Path
from unittest import mock
+from antares.study.version import StudyVersion
+
from antarest.core.cache.business.local_chache import LocalCache, LocalCacheElement
from antarest.core.config import CacheConfig
from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, FileStudyTreeConfigDTO
@@ -26,7 +28,7 @@ def test_lifecycle():
study_path=Path("somepath"),
path=Path("somepath"),
study_id="",
- version=-1,
+ version=StudyVersion.parse(0),
areas={
"a1": Area(
name="a1",
diff --git a/tests/cache/test_redis_cache.py b/tests/cache/test_redis_cache.py
index 54affdc8f7..808131fabb 100644
--- a/tests/cache/test_redis_cache.py
+++ b/tests/cache/test_redis_cache.py
@@ -13,6 +13,8 @@
from pathlib import Path
from unittest.mock import Mock
+from antares.study.version import StudyVersion
+
from antarest.core.cache.business.redis_cache import RedisCache, RedisCacheElement
from antarest.core.serialization import from_json
from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, FileStudyTreeConfigDTO
@@ -25,7 +27,7 @@ def test_lifecycle():
study_path=Path("somepath"),
path=Path("somepath"),
study_id="",
- version=-1,
+ version=StudyVersion.parse(0),
areas={
"a1": Area(
name="a1",
diff --git a/tests/integration/studies_blueprint/test_study_version.py b/tests/integration/studies_blueprint/test_study_version.py
new file mode 100644
index 0000000000..8a4e603450
--- /dev/null
+++ b/tests/integration/studies_blueprint/test_study_version.py
@@ -0,0 +1,84 @@
+# Copyright (c) 2024, RTE (https://www.rte-france.com)
+#
+# See AUTHORS.txt
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+# SPDX-License-Identifier: MPL-2.0
+#
+# This file is part of the Antares project.
+
+import io
+import os
+import zipfile
+from pathlib import Path
+
+from starlette.testclient import TestClient
+
+from tests.integration.assets import ASSETS_DIR
+
+
+class TestStudyVersions:
+ """
+ This class contains tests related to the handling of different study versions
+ """
+
+ def test_nominal_case(self, client: TestClient, user_access_token: str, tmp_path: str) -> None:
+ # =============================
+ # SET UP
+ # =============================
+ client.headers = {"Authorization": f"Bearer {user_access_token}"}
+
+ data = """[antares]
+version = 9.0
+caption = test_version_90
+created = 1682506382.235618
+lastsave = 1682506382.23562
+author = Unknown
+"""
+ tmp_dir = Path(tmp_path)
+ zip_path = ASSETS_DIR / "STA-mini.zip"
+ # Extract zip inside tmp_dir
+ new_zip_path = tmp_dir / "test_version_90"
+ with zipfile.ZipFile(zip_path, "r") as zip_ref:
+ zip_ref.extractall(new_zip_path)
+
+ # Change file content
+ target_path = os.path.join(new_zip_path, "STA-mini", "study.antares")
+ with open(target_path, "w") as file:
+ file.write(data)
+
+ final_path = tmp_dir / "test_version_90.zip"
+ # Rezip it
+ with zipfile.ZipFile(final_path, "w", zipfile.ZIP_DEFLATED) as zipf:
+ for root, dirs, files in os.walk(new_zip_path):
+ for file in files:
+ file_path = os.path.join(root, file)
+ zipf.write(file_path, os.path.relpath(file_path, new_zip_path))
+
+ # =============================
+ # LIFECYCLE
+ # =============================
+
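+ # The pristine STA-mini archive is a v7.0 study (reported as 700); the re-zipped copy declares 9.0 and should be reported as 900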
+ for f in [zip_path, final_path]:
+ # Imports a study
+ res = client.post("/v1/studies/_import", files={"study": io.BytesIO(f.read_bytes())})
+ res.raise_for_status()
+ study_id = res.json()
+
+ # Gets study information
+ res = client.get(f"v1/studies/{study_id}")
+ res.raise_for_status()
+ assert res.json()["version"] == (900 if f == final_path else 700)
+
+ # Reads `study.version` file
+ res = client.get(f"v1/studies/{study_id}/raw?path=study")
+ res.raise_for_status()
+ version = str(res.json()["antares"]["version"])
+ assert version == ("9.0" if f == final_path else "700")
+
+ # Delete the study
+ res = client.delete(f"v1/studies/{study_id}")
+ res.raise_for_status()
diff --git a/tests/integration/study_data_blueprint/test_thermal.py b/tests/integration/study_data_blueprint/test_thermal.py
index 6567fc205f..6328606247 100644
--- a/tests/integration/study_data_blueprint/test_thermal.py
+++ b/tests/integration/study_data_blueprint/test_thermal.py
@@ -297,22 +297,17 @@ class TestThermal:
@pytest.mark.parametrize(
"version", [pytest.param(0, id="No Upgrade"), pytest.param(860, id="v8.6"), pytest.param(870, id="v8.7")]
)
- def test_lifecycle(
- self, client: TestClient, user_access_token: str, internal_study_id: str, admin_access_token: str, version: int
- ) -> None:
+ def test_lifecycle(self, client: TestClient, user_access_token: str, internal_study_id: str, version: int) -> None:
+ client.headers = {"Authorization": f"Bearer {user_access_token}"}
# =============================
# STUDY UPGRADE
# =============================
if version != 0:
- res = client.put(
- f"/v1/studies/{internal_study_id}/upgrade",
- headers={"Authorization": f"Bearer {admin_access_token}"},
- params={"target_version": version},
- )
+ res = client.put(f"/v1/studies/{internal_study_id}/upgrade", params={"target_version": version})
res.raise_for_status()
task_id = res.json()
- task = wait_task_completion(client, admin_access_token, task_id)
+ task = wait_task_completion(client, user_access_token, task_id)
from antarest.core.tasks.model import TaskStatus
assert task.status == TaskStatus.COMPLETED, task
@@ -347,14 +342,18 @@ def test_lifecycle(
# or an invalid name should also raise a validation error.
attempts = [{}, {"name": ""}, {"name": "!??"}]
for attempt in attempts:
- res = client.post(
- f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal",
- headers={"Authorization": f"Bearer {user_access_token}"},
- json=attempt,
- )
+ res = client.post(f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", json=attempt)
assert res.status_code == 422, res.json()
assert res.json()["exception"] in {"ValidationError", "RequestValidationError"}, res.json()
+ # creating a thermal cluster with a numeric name (coerced to a string) should not raise an Exception
+ res = client.post(f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", json={"name": 111})
+ assert res.status_code == 200, res.json()
+ res = client.request(
+ "DELETE", f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", json=["111"]
+ )
+ assert res.status_code == 204, res.json()
+
# We can create a thermal cluster with the following properties:
fr_gas_conventional_props = {
**DEFAULT_PROPERTIES,
@@ -371,9 +370,7 @@ def test_lifecycle(
"marketBidCost": 181.267,
}
res = client.post(
- f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal",
- headers={"Authorization": f"Bearer {user_access_token}"},
- json=fr_gas_conventional_props,
+ f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", json=fr_gas_conventional_props
)
assert res.status_code == 200, res.json()
fr_gas_conventional_id = res.json()["id"]
@@ -395,13 +392,14 @@ def test_lifecycle(
assert res.json() == fr_gas_conventional_cfg
# reading the properties of a thermal cluster
- res = client.get(
- f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
- )
+ res = client.get(f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}")
assert res.status_code == 200, res.json()
assert res.json() == fr_gas_conventional_cfg
+ # asserts it didn't break the allocation matrix
+ res = client.get(f"/v1/studies/{internal_study_id}/areas/{area_id}/hydro/allocation/form")
+ assert res.status_code == 200, res.json()
+
# ==========================
# THERMAL CLUSTER MATRICES
# ==========================
@@ -410,17 +408,11 @@ def test_lifecycle(
matrix_path = f"input/thermal/prepro/{area_id}/{fr_gas_conventional_id.lower()}/data"
args = {"target": matrix_path, "matrix": matrix}
res = client.post(
- f"/v1/studies/{internal_study_id}/commands",
- json=[{"action": "replace_matrix", "args": args}],
- headers={"Authorization": f"Bearer {user_access_token}"},
+ f"/v1/studies/{internal_study_id}/commands", json=[{"action": "replace_matrix", "args": args}]
)
assert res.status_code in {200, 201}, res.json()
- res = client.get(
- f"/v1/studies/{internal_study_id}/raw",
- params={"path": matrix_path},
- headers={"Authorization": f"Bearer {user_access_token}"},
- )
+ res = client.get(f"/v1/studies/{internal_study_id}/raw", params={"path": matrix_path})
assert res.status_code == 200
assert res.json()["data"] == matrix
@@ -429,17 +421,13 @@ def test_lifecycle(
# ==================================
# Reading the list of thermal clusters
- res = client.get(
- f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal",
- headers={"Authorization": f"Bearer {user_access_token}"},
- )
+ res = client.get(f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal")
assert res.status_code == 200, res.json()
assert res.json() == EXISTING_CLUSTERS + [fr_gas_conventional_cfg]
# updating properties
res = client.patch(
f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
json={
"name": "FR_Gas conventional old 1",
"nominalCapacity": 32.1,
@@ -453,10 +441,7 @@ def test_lifecycle(
}
assert res.json() == fr_gas_conventional_cfg
- res = client.get(
- f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
- )
+ res = client.get(f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}")
assert res.status_code == 200, res.json()
assert res.json() == fr_gas_conventional_cfg
@@ -467,7 +452,6 @@ def test_lifecycle(
# updating properties
res = client.patch(
f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
json={
"marginalCost": 182.456,
"startupCost": 6140.8,
@@ -489,24 +473,19 @@ def test_lifecycle(
bad_properties = {"unitCount": 0}
res = client.patch(
f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
json=bad_properties,
)
assert res.status_code == 422, res.json()
assert res.json()["exception"] == "RequestValidationError", res.json()
# The thermal cluster properties should not have been updated.
- res = client.get(
- f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
- )
+ res = client.get(f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}")
assert res.status_code == 200, res.json()
assert res.json() == fr_gas_conventional_cfg
# Update with a pollutant. Should succeed even with versions prior to v8.6
res = client.patch(
f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
json={"nox": 10.0},
)
assert res.status_code == 200
@@ -514,7 +493,6 @@ def test_lifecycle(
# Update with the field `efficiency`. Should succeed even with versions prior to v8.7
res = client.patch(
f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
json={"efficiency": 97.0},
)
assert res.status_code == 200
@@ -526,7 +504,6 @@ def test_lifecycle(
new_name = "Duplicate of Fr_Gas_Conventional"
res = client.post(
f"/v1/studies/{internal_study_id}/areas/{area_id}/thermals/{fr_gas_conventional_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
params={"newName": new_name},
)
assert res.status_code in {200, 201}, res.json()
@@ -544,11 +521,7 @@ def test_lifecycle(
# asserts the matrix has also been duplicated
new_cluster_matrix_path = f"input/thermal/prepro/{area_id}/{duplicated_id.lower()}/data"
- res = client.get(
- f"/v1/studies/{internal_study_id}/raw",
- params={"path": new_cluster_matrix_path},
- headers={"Authorization": f"Bearer {user_access_token}"},
- )
+ res = client.get(f"/v1/studies/{internal_study_id}/raw", params={"path": new_cluster_matrix_path})
assert res.status_code == 200
assert res.json()["data"] == matrix
@@ -558,8 +531,7 @@ def test_lifecycle(
# Everything is fine at the beginning
res = client.get(
- f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate"
)
assert res.status_code == 200
assert res.json() is True
@@ -575,8 +547,7 @@ def test_lifecycle(
# Validation should fail
res = client.get(
- f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate"
)
assert res.status_code == 422
obj = res.json()
@@ -594,8 +565,7 @@ def test_lifecycle(
# Validation should succeed again
res = client.get(
- f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate"
)
assert res.status_code == 200
assert res.json() is True
@@ -612,8 +582,7 @@ def test_lifecycle(
# Validation should fail
res = client.get(
- f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}/validate"
)
assert res.status_code == 422
obj = res.json()
@@ -648,19 +617,12 @@ def test_lifecycle(
bc_obj["lessTermMatrix"] = matrix.tolist()
# noinspection SpellCheckingInspection
- res = client.post(
- f"/v1/studies/{internal_study_id}/bindingconstraints",
- json=bc_obj,
- headers={"Authorization": f"Bearer {user_access_token}"},
- )
+ res = client.post(f"/v1/studies/{internal_study_id}/bindingconstraints", json=bc_obj)
assert res.status_code in {200, 201}, res.json()
# verify that we can't delete the thermal cluster because it is referenced in a binding constraint
res = client.request(
- "DELETE",
- f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal",
- headers={"Authorization": f"Bearer {user_access_token}"},
- json=[fr_gas_conventional_id],
+ "DELETE", f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", json=[fr_gas_conventional_id]
)
assert res.status_code == 403, res.json()
description = res.json()["description"]
@@ -668,37 +630,23 @@ def test_lifecycle(
assert res.json()["exception"] == "ReferencedObjectDeletionNotAllowed"
# delete the binding constraint
- res = client.delete(
- f"/v1/studies/{internal_study_id}/bindingconstraints/{bc_obj['name']}",
- headers={"Authorization": f"Bearer {user_access_token}"},
- )
+ res = client.delete(f"/v1/studies/{internal_study_id}/bindingconstraints/{bc_obj['name']}")
assert res.status_code == 200, res.json()
# Now we can delete the thermal cluster
res = client.request(
- "DELETE",
- f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal",
- headers={"Authorization": f"Bearer {user_access_token}"},
- json=[fr_gas_conventional_id],
+ "DELETE", f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", json=[fr_gas_conventional_id]
)
assert res.status_code == 204, res.json()
# check that the binding constraint has been deleted
# noinspection SpellCheckingInspection
- res = client.get(
- f"/v1/studies/{internal_study_id}/bindingconstraints",
- headers={"Authorization": f"Bearer {user_access_token}"},
- )
+ res = client.get(f"/v1/studies/{internal_study_id}/bindingconstraints")
assert res.status_code == 200, res.json()
assert len(res.json()) == 0
# If the thermal cluster list is empty, the deletion should be a no-op.
- res = client.request(
- "DELETE",
- f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal",
- headers={"Authorization": f"Bearer {user_access_token}"},
- json=[],
- )
+ res = client.request("DELETE", f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal", json=[])
assert res.status_code == 204, res.json()
assert res.text in {"", "null"} # Old FastAPI versions return 'null'.
@@ -709,17 +657,13 @@ def test_lifecycle(
res = client.request(
"DELETE",
f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal",
- headers={"Authorization": f"Bearer {user_access_token}"},
json=[other_cluster_id1, other_cluster_id2],
)
assert res.status_code == 204, res.json()
assert res.text in {"", "null"} # Old FastAPI versions return 'null'.
# The list of thermal clusters should not contain the deleted ones.
- res = client.get(
- f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal",
- headers={"Authorization": f"Bearer {user_access_token}"},
- )
+ res = client.get(f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal")
assert res.status_code == 200, res.json()
deleted_clusters = [other_cluster_id1, other_cluster_id2, fr_gas_conventional_id]
for cluster in res.json():
@@ -734,7 +678,6 @@ def test_lifecycle(
res = client.request(
"DELETE",
f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/clusters/thermal",
- headers={"Authorization": f"Bearer {user_access_token}"},
json=[fr_gas_conventional_id],
)
assert res.status_code == 500, res.json()
@@ -750,10 +693,7 @@ def test_lifecycle(
# Check DELETE with the wrong value of `study_id`
bad_study_id = "bad_study"
res = client.request(
- "DELETE",
- f"/v1/studies/{bad_study_id}/areas/{area_id}/clusters/thermal",
- headers={"Authorization": f"Bearer {user_access_token}"},
- json=[fr_gas_conventional_id],
+ "DELETE", f"/v1/studies/{bad_study_id}/areas/{area_id}/clusters/thermal", json=[fr_gas_conventional_id]
)
obj = res.json()
description = obj["description"]
@@ -762,8 +702,7 @@ def test_lifecycle(
# Check GET with wrong `area_id`
res = client.get(
- f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/clusters/thermal/{fr_gas_conventional_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
+ f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/clusters/thermal/{fr_gas_conventional_id}"
)
obj = res.json()
description = obj["description"]
@@ -771,10 +710,7 @@ def test_lifecycle(
assert res.status_code == 404, res.json()
# Check GET with wrong `study_id`
- res = client.get(
- f"/v1/studies/{bad_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
- )
+ res = client.get(f"/v1/studies/{bad_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}")
obj = res.json()
description = obj["description"]
assert res.status_code == 404, res.json()
@@ -783,7 +719,6 @@ def test_lifecycle(
# Check POST with wrong `study_id`
res = client.post(
f"/v1/studies/{bad_study_id}/areas/{area_id}/clusters/thermal",
- headers={"Authorization": f"Bearer {user_access_token}"},
json={"name": fr_gas_conventional, "group": "Battery"},
)
obj = res.json()
@@ -794,7 +729,6 @@ def test_lifecycle(
# Check POST with wrong `area_id`
res = client.post(
f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/clusters/thermal",
- headers={"Authorization": f"Bearer {user_access_token}"},
json={
"name": fr_gas_conventional,
"group": "Oil",
@@ -817,7 +751,6 @@ def test_lifecycle(
# Check POST with wrong `group`
res = client.post(
f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal",
- headers={"Authorization": f"Bearer {user_access_token}"},
json={"name": fr_gas_conventional, "group": "GroupFoo"},
)
assert res.status_code == 200, res.json()
@@ -828,7 +761,6 @@ def test_lifecycle(
# Check PATCH with the wrong `area_id`
res = client.patch(
f"/v1/studies/{internal_study_id}/areas/{bad_area_id}/clusters/thermal/{fr_gas_conventional_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
json={
"group": "Oil",
"unitCount": 1,
@@ -850,7 +782,6 @@ def test_lifecycle(
bad_cluster_id = "bad_cluster"
res = client.patch(
f"/v1/studies/{internal_study_id}/areas/{area_id}/clusters/thermal/{bad_cluster_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
json={
"group": "Oil",
"unitCount": 1,
@@ -871,7 +802,6 @@ def test_lifecycle(
# Check PATCH with the wrong `study_id`
res = client.patch(
f"/v1/studies/{bad_study_id}/areas/{area_id}/clusters/thermal/{fr_gas_conventional_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
json={
"group": "Oil",
"unitCount": 1,
@@ -891,9 +821,7 @@ def test_lifecycle(
# Cannot duplicate a fake cluster
unknown_id = "unknown"
res = client.post(
- f"/v1/studies/{internal_study_id}/areas/{area_id}/thermals/{unknown_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
- params={"newName": "duplicate"},
+ f"/v1/studies/{internal_study_id}/areas/{area_id}/thermals/{unknown_id}", params={"newName": "duplicate"}
)
assert res.status_code == 404, res.json()
obj = res.json()
@@ -903,7 +831,6 @@ def test_lifecycle(
# Cannot duplicate with an existing id
res = client.post(
f"/v1/studies/{internal_study_id}/areas/{area_id}/thermals/{duplicated_id}",
- headers={"Authorization": f"Bearer {user_access_token}"},
params={"newName": new_name.upper()}, # different case but same ID
)
assert res.status_code == 409, res.json()
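Throughout the hunks above, the explicit `Authorization` header is dropped from every request because the `client` used by `test_lifecycle` is expected to arrive already authenticated. A minimal sketch of what such a pre-authenticated fixture could look like, assuming Starlette's `TestClient` and pre-existing `app` / `user_access_token` fixtures (the real fixture is defined elsewhere in the test suite and is not part of this diff):

```python
import pytest
from starlette.testclient import TestClient


@pytest.fixture
def client(app, user_access_token: str) -> TestClient:
    # Default headers set here are merged into every request made with this
    # client, so individual calls no longer need to pass
    # headers={"Authorization": f"Bearer {user_access_token}"} themselves.
    client = TestClient(app, raise_server_exceptions=False)
    client.headers = {"Authorization": f"Bearer {user_access_token}"}
    return client
```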
diff --git a/tests/integration/test_integration_variantmanager_tool.py b/tests/integration/test_integration_variantmanager_tool.py
index 85783fe789..a14a0f9a36 100644
--- a/tests/integration/test_integration_variantmanager_tool.py
+++ b/tests/integration/test_integration_variantmanager_tool.py
@@ -28,7 +28,6 @@
COMMAND_FILE,
MATRIX_STORE_DIR,
RemoteVariantGenerator,
- create_http_client,
extract_commands,
generate_diff,
generate_study,
diff --git a/tests/launcher/test_slurm_launcher.py b/tests/launcher/test_slurm_launcher.py
index 4482eee71a..64ae611217 100644
--- a/tests/launcher/test_slurm_launcher.py
+++ b/tests/launcher/test_slurm_launcher.py
@@ -19,6 +19,7 @@
from unittest.mock import ANY, Mock, patch
import pytest
+from antares.study.version import SolverVersion
from antareslauncher.data_repo.data_repo_tinydb import DataRepoTinydb
from antareslauncher.main import MainParameters
from antareslauncher.study_dto import StudyDTO
@@ -308,7 +309,7 @@ def call_launcher_mock(arguments: Namespace, parameters: MainParameters):
# When the launcher is called
study_uuid = str(uuid.uuid4())
- slurm_launcher._run_study(study_uuid, job_id, LauncherParametersDTO(), str(version))
+ slurm_launcher._run_study(study_uuid, job_id, LauncherParametersDTO(), SolverVersion.parse(version))
# Check the results
assert (
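In the SLURM launcher test, `_run_study` now receives a `SolverVersion` object rather than a plain string. A hedged illustration of the assumed `antares-study-version` behaviour, where `parse` accepts either a dotted string or a compact integer:

```python
from antares.study.version import SolverVersion

# Assumption: "8.8" and 880 denote the same solver version once parsed.
assert SolverVersion.parse("8.8") == SolverVersion.parse(880)
```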
diff --git a/tests/storage/business/test_config_manager.py b/tests/storage/business/test_config_manager.py
index f89a4438be..fa9e3b887a 100644
--- a/tests/storage/business/test_config_manager.py
+++ b/tests/storage/business/test_config_manager.py
@@ -13,6 +13,8 @@
from pathlib import Path
from unittest.mock import Mock
+from antares.study.version import StudyVersion
+
from antarest.study.business.thematic_trimming_field_infos import FIELDS_INFO
from antarest.study.business.thematic_trimming_management import (
ThematicTrimmingFormFields,
@@ -66,27 +68,27 @@ def test_thematic_trimming_config() -> None:
study.version = config.version = 700
actual = thematic_trimming_manager.get_field_values(study)
- fields_info = get_fields_info(int(study.version))
+ fields_info = get_fields_info(StudyVersion.parse(study.version))
expected = ThematicTrimmingFormFields(**dict.fromkeys(fields_info, True))
assert actual == expected
study.version = config.version = 800
actual = thematic_trimming_manager.get_field_values(study)
- fields_info = get_fields_info(int(study.version))
+ fields_info = get_fields_info(StudyVersion.parse(study.version))
expected = ThematicTrimmingFormFields(**dict.fromkeys(fields_info, True))
expected.avl_dtg = False
assert actual == expected
study.version = config.version = 820
actual = thematic_trimming_manager.get_field_values(study)
- fields_info = get_fields_info(int(study.version))
+ fields_info = get_fields_info(StudyVersion.parse(study.version))
expected = ThematicTrimmingFormFields(**dict.fromkeys(fields_info, True))
expected.avl_dtg = False
assert actual == expected
study.version = config.version = 830
actual = thematic_trimming_manager.get_field_values(study)
- fields_info = get_fields_info(int(study.version))
+ fields_info = get_fields_info(StudyVersion.parse(study.version))
expected = ThematicTrimmingFormFields(**dict.fromkeys(fields_info, True))
expected.dens = False
expected.profit_by_plant = False
@@ -94,7 +96,7 @@ def test_thematic_trimming_config() -> None:
study.version = config.version = 840
actual = thematic_trimming_manager.get_field_values(study)
- fields_info = get_fields_info(int(study.version))
+ fields_info = get_fields_info(StudyVersion.parse(study.version))
expected = ThematicTrimmingFormFields(**dict.fromkeys(fields_info, False))
expected.cong_fee_alg = True
assert actual == expected
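The thematic-trimming test now hands `get_fields_info` a parsed `StudyVersion` instead of an `int`. A short sketch of the assumed benefit, with illustrative values: parsed versions compare naturally against the thresholds used to gate fields, without round-tripping through integers.

```python
from antares.study.version import StudyVersion

# Assumed behaviour: 820 and "8.2" parse to the same version, and ordering
# comparisons against feature thresholds work directly on the parsed objects.
v = StudyVersion.parse(820)
assert v == StudyVersion.parse("8.2")
assert v >= StudyVersion.parse("8.0")
```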
diff --git a/tests/storage/business/test_raw_study_service.py b/tests/storage/business/test_raw_study_service.py
index 9364edfa14..2e9bb25c5a 100644
--- a/tests/storage/business/test_raw_study_service.py
+++ b/tests/storage/business/test_raw_study_service.py
@@ -621,7 +621,7 @@ def test_check_and_update_study_version_in_database(tmp_path: Path) -> None:
assert raw_study.version == "100"
- raw_study = RawStudy(id=name, workspace="foo", path=str(study_path), version="100")
+ raw_study = RawStudy(id=name, workspace="foo", path=str(study_path), version="42")
file_study_tree = Mock()
file_study_tree.get.return_value = {"version": 42}
diff --git a/tests/storage/repository/filesystem/config/test_config_files.py b/tests/storage/repository/filesystem/config/test_config_files.py
index a0fdf6d4f5..77ae462678 100644
--- a/tests/storage/repository/filesystem/config/test_config_files.py
+++ b/tests/storage/repository/filesystem/config/test_config_files.py
@@ -81,7 +81,7 @@ def test_parse_output_parameters(study_path: Path) -> None:
config = FileStudyTreeConfig(
study_path=study_path,
path=study_path,
- version=-1,
+ version=0,
store_new_set=True,
study_id="id",
output_path=study_path / "output",
@@ -105,7 +105,7 @@ def test_parse_bindings(study_path: Path) -> None:
config = FileStudyTreeConfig(
study_path=study_path,
path=study_path,
- version=-1,
+ version=0,
bindings=[
BindingConstraintDTO(
id="bindA",
@@ -154,7 +154,7 @@ def test_parse_outputs(study_path: Path) -> None:
study_path=study_path,
path=study_path,
study_id="id",
- version=-1,
+ version=0,
output_path=study_path / "output",
outputs={
"20201220-1456eco-hello": Simulation(
@@ -271,7 +271,7 @@ def test_parse_area(study_path: Path) -> None:
study_path=study_path,
path=study_path,
study_id="id",
- version=-1,
+ version=0,
output_path=study_path / "output",
areas={
"fr": Area(
@@ -307,7 +307,7 @@ def test_parse_area__extra_area(study_path: Path) -> None:
study_path=study_path,
path=study_path,
study_id="id",
- version=-1,
+ version=0,
output_path=study_path / "output",
areas={
"fr": Area(
diff --git a/tests/storage/test_service.py b/tests/storage/test_service.py
index 47ba9fe49c..0944e7b800 100644
--- a/tests/storage/test_service.py
+++ b/tests/storage/test_service.py
@@ -110,7 +110,7 @@ def study_to_dto(study: Study) -> StudyMetadataDTO:
return StudyMetadataDTO(
id=study.id,
name=study.name,
- version=int(study.version),
+ version=study.version,
created=str(study.created_at),
updated=str(study.updated_at),
workspace=DEFAULT_WORKSPACE_NAME,
diff --git a/tests/storage/web/test_studies_bp.py b/tests/storage/web/test_studies_bp.py
index 33cedcc8df..6774f6127e 100644
--- a/tests/storage/web/test_studies_bp.py
+++ b/tests/storage/web/test_studies_bp.py
@@ -553,4 +553,4 @@ def test_get_study_versions(tmp_path: Path) -> None:
client = create_test_client(Mock(), raise_server_exceptions=False)
result = client.get("/v1/studies/_versions")
- assert result.json() == list(STUDY_REFERENCE_TEMPLATES.keys())
+ assert result.json() == [f"{v:ddd}" for v in STUDY_REFERENCE_TEMPLATES]
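The `_versions` endpoint test now renders each template key through the `ddd` format spec. Assuming `STUDY_REFERENCE_TEMPLATES` is keyed by `StudyVersion`, the spec is expected to produce the compact three-digit string form:

```python
from antares.study.version import StudyVersion

# Assumed: the "ddd" format spec yields the compact three-digit rendering.
assert f"{StudyVersion.parse('8.8'):ddd}" == "880"
```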
diff --git a/tests/study/business/test_all_optional_metaclass.py b/tests/study/business/test_all_optional_metaclass.py
index b8d1197c5e..5001019595 100644
--- a/tests/study/business/test_all_optional_metaclass.py
+++ b/tests/study/business/test_all_optional_metaclass.py
@@ -10,12 +10,13 @@
#
# This file is part of the Antares project.
-from pydantic import BaseModel, Field
+from pydantic import Field
+from antarest.core.serialization import AntaresBaseModel
from antarest.study.business.all_optional_meta import all_optional_model, camel_case_model
-class Model(BaseModel):
+class Model(AntaresBaseModel):
float_with_default: float = 1
float_without_default: float
boolean_with_default: bool = True
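Finally, the metaclass test swaps pydantic's `BaseModel` for the project-wide `AntaresBaseModel`. Its definition is not part of this diff; the sketch below shows the usual motivation for such a base class, assuming it mainly carries a shared `ConfigDict` such as `populate_by_name=True` so that aliased models keep accepting snake_case field names (the stand-in class name is illustrative only).

```python
from pydantic import BaseModel, ConfigDict, Field


class ProjectBaseModel(BaseModel):  # illustrative stand-in for AntaresBaseModel
    model_config = ConfigDict(populate_by_name=True)


class Example(ProjectBaseModel):
    unit_count: int = Field(default=1, alias="unitCount")


# Both the camelCase alias and the Python field name are accepted as input.
assert Example(unitCount=2).unit_count == 2
assert Example(unit_count=3).unit_count == 3
```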