diff --git a/antarest/__init__.py b/antarest/__init__.py index cc3ee7ecf7..29e8eb67b6 100644 --- a/antarest/__init__.py +++ b/antarest/__init__.py @@ -7,9 +7,9 @@ # Standard project metadata -__version__ = "2.14.3" +__version__ = "2.14.4" __author__ = "RTE, Antares Web Team" -__date__ = "2023-06-20" +__date__ = "2023-06-28" # noinspection SpellCheckingInspection __credits__ = "(c) Réseau de Transport de l’Électricité (RTE)" diff --git a/antarest/core/core_blueprint.py b/antarest/core/core_blueprint.py index 73fd56fce9..dc4504d35b 100644 --- a/antarest/core/core_blueprint.py +++ b/antarest/core/core_blueprint.py @@ -63,7 +63,9 @@ def kill_worker( ) -> Any: if not current_user.is_site_admin(): raise UserHasNotPermissionError() - logging.getLogger(__name__).warning("Killing the worker") - exit(1) + logging.getLogger(__name__).critical("Killing the worker") + # PyInstaller modifies the behavior of built-in functions, such as `exit`. + # It is advisable to use `sys.exit` or raise the `SystemExit` exception instead. 
+ raise SystemExit(f"Worker killed by the user #{current_user.id}") return bp diff --git a/antarest/core/version_info.py b/antarest/core/version_info.py index f6532b9cdb..311ceff571 100644 --- a/antarest/core/version_info.py +++ b/antarest/core/version_info.py @@ -1,9 +1,11 @@ """ Python module that is dedicated to printing application version and dependencies information """ +import os import subprocess from pathlib import Path from typing import Dict +import sys from pydantic import BaseModel @@ -57,11 +59,9 @@ def get_commit_id(resources_dir: Path) -> str: def get_last_commit_from_git() -> str: """Returns the commit ID of the current Git HEAD, or "".""" - command = "git log -1 HEAD --format=%H" + command = ["git", "log", "-1", "HEAD", "--format=%H"] try: - return subprocess.check_output( - command, encoding="utf-8", shell=True - ).strip() + return subprocess.check_output(command, encoding="utf-8").strip() except (subprocess.CalledProcessError, FileNotFoundError): return "" @@ -79,8 +79,16 @@ def get_dependencies() -> Dict[str, str]: subprocess.CalledProcessError: If the `pip freeze` command fails for some reason. """ + python_name = Path(sys.executable).with_suffix("").name + if python_name.lower() != "python": + # Due to PyInstaller renaming the executable to "AntaresWebServer", + # accessing the "python" executable becomes impossible, resulting in complications + # when trying to obtain the list of installed packages using `pip freeze`. 
+ return {} + # fmt: off - output = subprocess.check_output("pip freeze", encoding="utf-8", shell=True) + args = [sys.executable, "-m", "pip", "freeze"] + output = subprocess.check_output(args, encoding="utf-8") lines = ( line for line in output.splitlines(keepends=False) @@ -90,4 +98,3 @@ def get_dependencies() -> Dict[str, str]: packages = dict(line.split("==", 1) for line in lines) # AntaREST is not a dependency of AntaREST return {k: v for k, v in packages.items() if k.lower() != "antarest"} - # fmt: on diff --git a/antarest/launcher/adapters/local_launcher/local_launcher.py b/antarest/launcher/adapters/local_launcher/local_launcher.py index 5b3a95ff5b..82a70f5e26 100644 --- a/antarest/launcher/adapters/local_launcher/local_launcher.py +++ b/antarest/launcher/adapters/local_launcher/local_launcher.py @@ -6,7 +6,7 @@ import threading import time from pathlib import Path -from typing import IO, Callable, Dict, Optional, Tuple, cast +from typing import IO, Callable, Dict, Optional, Tuple, cast, List from uuid import UUID from antarest.core.config import Config @@ -118,8 +118,13 @@ def stop_reading_output() -> bool: str(uuid), study_uuid, export_path, launcher_parameters ) + args = [ + str(antares_solver_path), + f"--force-parallel={launcher_parameters.nb_cpu}", + str(export_path), + ] process = subprocess.Popen( - [antares_solver_path, export_path], + args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, diff --git a/antarest/main.py b/antarest/main.py index 1efa1d127c..fcaa6b6215 100644 --- a/antarest/main.py +++ b/antarest/main.py @@ -1,8 +1,9 @@ import argparse import copy import logging +import re from pathlib import Path -from typing import Any, Dict, Optional, Tuple, cast +from typing import Any, Dict, Optional, Sequence, Tuple, cast import pydantic import sqlalchemy.ext.baked # type: ignore @@ -32,11 +33,17 @@ from ratelimit import RateLimitMiddleware # type: ignore from ratelimit.backends.redis import RedisBackend # type: ignore 
from ratelimit.backends.simple import MemoryBackend # type: ignore +from starlette.middleware.base import ( + BaseHTTPMiddleware, + DispatchFunction, + RequestResponseEndpoint, +) from starlette.middleware.cors import CORSMiddleware from starlette.requests import Request from starlette.responses import JSONResponse from starlette.staticfiles import StaticFiles from starlette.templating import Jinja2Templates +from starlette.types import ASGIApp logger = logging.getLogger(__name__) @@ -174,6 +181,59 @@ def parse_arguments() -> argparse.Namespace: return parser.parse_args() +class URLRewriterMiddleware(BaseHTTPMiddleware): + """ + Middleware that rewrites the URL path to "/" (root path) for incoming requests + that do not match the known end points. This is useful for redirecting requests + to the main page of a ReactJS application when the user refreshes the browser. + """ + + def __init__( + self, + app: ASGIApp, + dispatch: Optional[DispatchFunction] = None, + root_path: str = "", + route_paths: Sequence[str] = (), + ) -> None: + """ + Initializes an instance of the URLRewriterMiddleware. + + Args: + app: The ASGI application to which the middleware is applied. + dispatch: The dispatch function to use. + root_path: The root path of the application. + The URL path will be rewritten relative to this root path. + route_paths: The known route paths of the application. + Requests that do not match any of these paths will be rewritten to the root path. + + Note: + The `root_path` can be set to a specific component of the URL path, such as "api". + The `route_paths` should contain all the known endpoints of the application. 
+ """ + dispatch = self.dispatch if dispatch is None else dispatch + super().__init__(app, dispatch) + self.root_path = f"/{root_path}" if root_path else "" + self.known_prefixes = { + re.findall(r"/(?:(?!/).)*", p)[0] for p in route_paths if p != "/" + } + + async def dispatch( + self, request: Request, call_next: RequestResponseEndpoint + ) -> Any: + """ + Intercepts the incoming request and rewrites the URL path if necessary. + Passes the modified or original request to the next middleware or endpoint handler. + """ + url_path = request.scope["path"] + if url_path in {"", "/"}: + pass + elif self.root_path and url_path.startswith(self.root_path): + request.scope["path"] = url_path[len(self.root_path) :] + elif not any(url_path.startswith(ep) for ep in self.known_prefixes): + request.scope["path"] = "/" + return await call_next(request) + + def fastapi_app( config_file: Path, resource_path: Optional[Path] = None, @@ -209,17 +269,6 @@ def fastapi_app( @application.get("/", include_in_schema=False) def home(request: Request) -> Any: - """ - Home ui - --- - responses: - '200': - content: - application/html: {} - description: html home page - tags: - - UI - """ return templates.TemplateResponse( "index.html", {"request": request} ) @@ -228,17 +277,6 @@ def home(request: Request) -> Any: # noinspection PyUnusedLocal @application.get("/", include_in_schema=False) def home(request: Request) -> Any: - """ - Home ui - --- - responses: - '200': - content: - application/html: {} - description: html home page - tags: - - UI - """ return "" @application.on_event("startup") @@ -386,6 +424,18 @@ def handle_all_exception(request: Request, exc: Exception) -> Any: services = create_services(config, application) + if mount_front: + # When the web application is running in Desktop mode, the ReactJS web app + # is served at the `/static` entry point. 
Any requests that are not API + # requests should be redirected to the `index.html` file, which will handle + # the route provided by the URL. + route_paths = [r.path for r in application.routes] # type: ignore + application.add_middleware( + URLRewriterMiddleware, + root_path=application.root_path, + route_paths=route_paths, + ) + if ( config.server.services and Module.WATCHER.value in config.server.services diff --git a/antarest/matrixstore/model.py b/antarest/matrixstore/model.py index 0840f887ad..6155f717ca 100644 --- a/antarest/matrixstore/model.py +++ b/antarest/matrixstore/model.py @@ -1,23 +1,40 @@ +import datetime import uuid -from datetime import datetime -from typing import Any, List, Optional, Union - -from pydantic import BaseModel -from sqlalchemy import Column, String, Enum, DateTime, Table, ForeignKey, Integer, Boolean # type: ignore -from sqlalchemy.orm import relationship # type: ignore -from sqlalchemy.orm.collections import attribute_mapped_collection # type: ignore +from typing import Any, List, Union from antarest.core.persistence import Base from antarest.login.model import GroupDTO, Identity, UserInfo +from pydantic import BaseModel +from sqlalchemy import ( # type: ignore + Boolean, + Column, + DateTime, + ForeignKey, + Integer, + String, + Table, +) +from sqlalchemy.orm import relationship # type: ignore class Matrix(Base): # type: ignore + """ + Represents a matrix object in the database. + + Attributes: + id: A SHA256 hash for the matrix data (primary key). + width: Number of columns in the matrix. + height: Number of rows in the matrix. + created_at: Creation date of the matrix (unknown usage). 
+ """ + + # noinspection SpellCheckingInspection __tablename__ = "matrix" - id = Column(String(64), primary_key=True) - width = Column(Integer) - height = Column(Integer) - created_at = Column(DateTime) + id: str = Column(String(64), primary_key=True) + width: int = Column(Integer) + height: int = Column(Integer) + created_at: datetime.datetime = Column(DateTime) def __eq__(self, other: Any) -> bool: if not isinstance(other, Matrix): @@ -59,19 +76,23 @@ class MatrixDataSetDTO(BaseModel): class MatrixDataSetRelation(Base): # type: ignore + # noinspection SpellCheckingInspection __tablename__ = "dataset_matrices" - dataset_id = Column( + + # noinspection SpellCheckingInspection + dataset_id: str = Column( String, ForeignKey("dataset.id", name="fk_matrixdatasetrelation_dataset_id"), primary_key=True, ) - matrix_id = Column( + # noinspection SpellCheckingInspection + matrix_id: str = Column( String, ForeignKey("matrix.id", name="fk_matrixdatasetrelation_matrix_id"), primary_key=True, ) - name = Column(String, primary_key=True) - matrix = relationship(Matrix) + name: str = Column(String, primary_key=True) + matrix: Matrix = relationship(Matrix) def __eq__(self, other: Any) -> bool: if not isinstance(other, MatrixDataSetRelation): @@ -87,24 +108,43 @@ def __eq__(self, other: Any) -> bool: class MatrixDataSet(Base): # type: ignore + """ + Represents a user dataset containing matrices in the database. + + Attributes: + id: The unique identifier of the dataset (primary key). + name: The name of the dataset. + owner_id: The foreign key referencing the owner's identity. + public: Indicates whether the dataset is public or not. + created_at: The creation date of the dataset. + updated_at: The last update date of the dataset. + + Relationships: + owner (Identity): The relationship to the owner's identity. + groups (List[Group]): The relationship to groups associated with the dataset. + matrices (List[MatrixDataSetRelation]): The relationship to matrix dataset relations. 
+ """ + + # noinspection SpellCheckingInspection __tablename__ = "dataset" - id = Column( + id: str = Column( String(36), primary_key=True, default=lambda: str(uuid.uuid4()), unique=True, ) - name = Column(String) - owner_id = Column( + name: str = Column(String) + # noinspection SpellCheckingInspection + owner_id: int = Column( Integer, ForeignKey("identities.id", name="fk_matrixdataset_identities_id"), ) - public = Column(Boolean, default=False) - created_at = Column(DateTime) - updated_at = Column(DateTime) + public: bool = Column(Boolean, default=False) + created_at: datetime.datetime = Column(DateTime) + updated_at: datetime.datetime = Column(DateTime) - owner = relationship(Identity) + owner: Identity = relationship(Identity) groups = relationship( "Group", secondary=lambda: groups_dataset_relation, diff --git a/antarest/matrixstore/repository.py b/antarest/matrixstore/repository.py index 8f8fd5f461..79362648c5 100644 --- a/antarest/matrixstore/repository.py +++ b/antarest/matrixstore/repository.py @@ -67,7 +67,7 @@ def query( """ query = db.session.query(MatrixDataSet) if name is not None: - query = query.filter(MatrixDataSet.name.ilike(f"%{name}%")) + query = query.filter(MatrixDataSet.name.ilike(f"%{name}%")) # type: ignore if owner is not None: query = query.filter(MatrixDataSet.owner_id == owner) datasets: List[MatrixDataSet] = query.distinct().all() diff --git a/antarest/study/business/adequacy_patch_management.py b/antarest/study/business/adequacy_patch_management.py index 836d6804fe..37e3bb9950 100644 --- a/antarest/study/business/adequacy_patch_management.py +++ b/antarest/study/business/adequacy_patch_management.py @@ -1,13 +1,14 @@ from enum import Enum -from typing import Optional, List, Any, Dict +from typing import Any, Dict, List, Optional from pydantic.types import StrictBool, confloat +from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import ( + GENERAL_DATA_PATH, + FieldInfo, 
FormFieldsBaseModel, execute_or_add_commands, - FieldInfo, - GENERAL_DATA_PATH, ) from antarest.study.model import Study from antarest.study.storage.storage_service import StudyStorageService @@ -16,7 +17,7 @@ ) -class PriceTakingOrder(str, Enum): +class PriceTakingOrder(EnumIgnoreCase): DENS = "DENS" LOAD = "Load" diff --git a/antarest/study/business/advanced_parameters_management.py b/antarest/study/business/advanced_parameters_management.py index bee06dde49..9b33035c84 100644 --- a/antarest/study/business/advanced_parameters_management.py +++ b/antarest/study/business/advanced_parameters_management.py @@ -1,14 +1,16 @@ import re -from typing import Optional, List, Any, Dict, TypedDict from enum import Enum +from typing import Any, Dict, List, Optional, TypedDict from pydantic import validator from pydantic.types import StrictInt, StrictStr + +from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import ( - FormFieldsBaseModel, - execute_or_add_commands, GENERAL_DATA_PATH, FieldInfo, + FormFieldsBaseModel, + execute_or_add_commands, ) from antarest.study.model import Study from antarest.study.storage.storage_service import StudyStorageService @@ -17,42 +19,42 @@ ) -class InitialReservoirLevel(str, Enum): +class InitialReservoirLevel(EnumIgnoreCase): COLD_START = "cold start" HOT_START = "hot start" -class HydroHeuristicPolicy(str, Enum): +class HydroHeuristicPolicy(EnumIgnoreCase): ACCOMMODATE_RULES_CURVES = "accommodate rule curves" MAXIMIZE_GENERATION = "maximize generation" -class HydroPricingMode(str, Enum): +class HydroPricingMode(EnumIgnoreCase): FAST = "fast" ACCURATE = "accurate" -class PowerFluctuation(str, Enum): +class PowerFluctuation(EnumIgnoreCase): FREE_MODULATIONS = "free modulations" MINIMIZE_EXCURSIONS = "minimize excursions" MINIMIZE_RAMPING = "minimize ramping" -class SheddingPolicy(str, Enum): +class SheddingPolicy(EnumIgnoreCase): SHAVE_PEAKS = "shave peaks" MINIMIZE_DURATION = "minimize 
duration" -class ReserveManagement(str, Enum): +class ReserveManagement(EnumIgnoreCase): GLOBAL = "global" -class UnitCommitmentMode(str, Enum): +class UnitCommitmentMode(EnumIgnoreCase): FAST = "fast" ACCURATE = "accurate" -class SimulationCore(str, Enum): +class SimulationCore(EnumIgnoreCase): MINIMUM = "minimum" LOW = "low" MEDIUM = "medium" @@ -60,7 +62,7 @@ class SimulationCore(str, Enum): MAXIMUM = "maximum" -class RenewableGenerationModeling(str, Enum): +class RenewableGenerationModeling(EnumIgnoreCase): AGGREGATED = "aggregated" CLUSTERS = "clusters" diff --git a/antarest/study/business/areas/properties_management.py b/antarest/study/business/areas/properties_management.py index efd01446ed..c5644cae4f 100644 --- a/antarest/study/business/areas/properties_management.py +++ b/antarest/study/business/areas/properties_management.py @@ -1,13 +1,14 @@ import re from builtins import sorted from enum import Enum -from typing import Optional, Dict, Any, cast, List, Set, Iterable +from typing import Any, Dict, Iterable, List, Optional, Set, cast -from pydantic import root_validator, Field +from pydantic import Field, root_validator +from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import ( - FormFieldsBaseModel, FieldInfo, + FormFieldsBaseModel, execute_or_add_commands, ) from antarest.study.model import Study @@ -65,7 +66,7 @@ def decode_filter( return ", ".join(sort_filter_options(encoded_value)) -class AdequacyPatchMode(str, Enum): +class AdequacyPatchMode(EnumIgnoreCase): OUTSIDE = "outside" INSIDE = "inside" VIRTUAL = "virtual" diff --git a/antarest/study/business/areas/renewable_management.py b/antarest/study/business/areas/renewable_management.py index cff4cbb447..67ffa9dd1a 100644 --- a/antarest/study/business/areas/renewable_management.py +++ b/antarest/study/business/areas/renewable_management.py @@ -2,6 +2,9 @@ from pathlib import PurePosixPath from typing import Any, Dict, List, Optional +from pydantic 
import Field + +from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import ( FieldInfo, FormFieldsBaseModel, @@ -12,10 +15,9 @@ from antarest.study.storage.variantstudy.model.command.update_config import ( UpdateConfig, ) -from pydantic import Field -class TimeSeriesInterpretation(str, Enum): +class TimeSeriesInterpretation(EnumIgnoreCase): POWER_GENERATION = "power-generation" PRODUCTION_FACTOR = "production-factor" diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index 92c708dc59..96d8a60a6c 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -1,12 +1,13 @@ from enum import Enum from pathlib import PurePosixPath -from typing import Optional, Dict, Any, List, cast +from typing import Any, Dict, List, Optional, cast -from pydantic import StrictStr, StrictBool +from pydantic import StrictBool, StrictStr +from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import ( - FormFieldsBaseModel, FieldInfo, + FormFieldsBaseModel, execute_or_add_commands, ) from antarest.study.model import Study @@ -16,13 +17,13 @@ ) -class TimeSeriesGenerationOption(str, Enum): +class TimeSeriesGenerationOption(EnumIgnoreCase): USE_GLOBAL_PARAMETER = "use global parameter" FORCE_NO_GENERATION = "force no generation" FORCE_GENERATION = "force generation" -class LawOption(str, Enum): +class LawOption(EnumIgnoreCase): UNIFORM = "uniform" GEOMETRIC = "geometric" diff --git a/antarest/study/business/enum_ignore_case.py b/antarest/study/business/enum_ignore_case.py new file mode 100644 index 0000000000..aa784aa9c4 --- /dev/null +++ b/antarest/study/business/enum_ignore_case.py @@ -0,0 +1,34 @@ +import enum +import typing + + +class EnumIgnoreCase(str, enum.Enum): + """ + Case-insensitive enum base class + + Usage: + + >>> class WeekDay(EnumIgnoreCase): + 
... MONDAY = "Monday" + ... TUESDAY = "Tuesday" + ... WEDNESDAY = "Wednesday" + ... THURSDAY = "Thursday" + ... FRIDAY = "Friday" + ... SATURDAY = "Saturday" + ... SUNDAY = "Sunday" + >>> WeekDay("monday") + + >>> WeekDay("MONDAY") + + """ + + @classmethod + def _missing_(cls, value: object) -> typing.Optional["EnumIgnoreCase"]: + if isinstance(value, str): + for member in cls: + # noinspection PyUnresolvedReferences + if member.value.upper() == value.upper(): + # noinspection PyTypeChecker + return member + # `value` is not a valid member of this enum + return None diff --git a/antarest/study/business/general_management.py b/antarest/study/business/general_management.py index bc35a5bcc2..13482dc138 100644 --- a/antarest/study/business/general_management.py +++ b/antarest/study/business/general_management.py @@ -1,12 +1,13 @@ from enum import Enum -from typing import Optional, Dict, Any, List, cast +from typing import Any, Dict, List, Optional, cast -from pydantic import StrictBool, conint, PositiveInt, root_validator +from pydantic import PositiveInt, StrictBool, conint, root_validator +from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import ( - FormFieldsBaseModel, - FieldInfo, GENERAL_DATA_PATH, + FieldInfo, + FormFieldsBaseModel, execute_or_add_commands, ) from antarest.study.model import Study @@ -20,13 +21,13 @@ ) -class Mode(str, Enum): +class Mode(EnumIgnoreCase): ECONOMY = "Economy" ADEQUACY = "Adequacy" DRAFT = "draft" -class Month(str, Enum): +class Month(EnumIgnoreCase): JANUARY = "january" FEBRUARY = "february" MARCH = "march" @@ -41,7 +42,7 @@ class Month(str, Enum): DECEMBER = "december" -class WeekDay(str, Enum): +class WeekDay(EnumIgnoreCase): MONDAY = "Monday" TUESDAY = "Tuesday" WEDNESDAY = "Wednesday" @@ -51,7 +52,7 @@ class WeekDay(str, Enum): SUNDAY = "Sunday" -class BuildingMode(str, Enum): +class BuildingMode(EnumIgnoreCase): AUTOMATIC = "Automatic" CUSTOM = "Custom" DERATED = "Derated" diff --git 
a/antarest/study/business/optimization_management.py b/antarest/study/business/optimization_management.py index 3339347081..a76726d244 100644 --- a/antarest/study/business/optimization_management.py +++ b/antarest/study/business/optimization_management.py @@ -1,13 +1,14 @@ from enum import Enum -from typing import Optional, Union, List, Any, Dict, cast +from typing import Any, Dict, List, Optional, Union, cast from pydantic.types import StrictBool +from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import ( + GENERAL_DATA_PATH, + FieldInfo, FormFieldsBaseModel, execute_or_add_commands, - FieldInfo, - GENERAL_DATA_PATH, ) from antarest.study.model import Study from antarest.study.storage.storage_service import StudyStorageService @@ -16,11 +17,11 @@ ) -class LegacyTransmissionCapacities(str, Enum): +class LegacyTransmissionCapacities(EnumIgnoreCase): INFINITE = "infinite" -class TransmissionCapacities(str, Enum): +class TransmissionCapacities(EnumIgnoreCase): LOCAL_VALUES = "local-values" NULL_FOR_ALL_LINKS = "null-for-all-links" INFINITE_FOR_ALL_LINKS = "infinite-for-all-links" @@ -28,14 +29,14 @@ class TransmissionCapacities(str, Enum): INFINITE_FOR_PHYSICAL_LINKS = "infinite-for-physical-links" -class UnfeasibleProblemBehavior(str, Enum): +class UnfeasibleProblemBehavior(EnumIgnoreCase): WARNING_DRY = "warning-dry" WARNING_VERBOSE = "warning-verbose" ERROR_DRY = "error-dry" ERROR_VERBOSE = "error-verbose" -class SimplexOptimizationRange(str, Enum): +class SimplexOptimizationRange(EnumIgnoreCase): DAY = "day" WEEK = "week" diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index 2662a2446d..a8014e7c44 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -1,19 +1,9 @@ from enum import Enum -from typing import ( - Optional, - Dict, - TypedDict, - Union, - Any, - List, -) +from typing 
import Any, Dict, List, Optional, TypedDict, Union from pydantic import StrictFloat -from pydantic.types import StrictStr, StrictInt, StrictBool +from pydantic.types import StrictBool, StrictInt, StrictStr -from antarest.study.business.binding_constraint_management import ( - BindingConstraintManager, -) from antarest.study.business.areas.properties_management import ( AdequacyPatchMode, ) @@ -21,21 +11,25 @@ TimeSeriesInterpretation, ) from antarest.study.business.areas.thermal_management import ( - TimeSeriesGenerationOption, LawOption, + TimeSeriesGenerationOption, +) +from antarest.study.business.binding_constraint_management import ( + BindingConstraintManager, ) +from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import ( FormFieldsBaseModel, execute_or_add_commands, ) -from antarest.study.model import RawStudy -from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.storage_service import StudyStorageService from antarest.study.common.default_values import ( - NodalOptimization, FilteringOptions, LinkProperties, + NodalOptimization, ) +from antarest.study.model import RawStudy +from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy +from antarest.study.storage.storage_service import StudyStorageService from antarest.study.storage.variantstudy.model.command.icommand import ICommand from antarest.study.storage.variantstudy.model.command.update_binding_constraint import ( UpdateBindingConstraint, @@ -45,7 +39,7 @@ ) -class TableTemplateType(str, Enum): +class TableTemplateType(EnumIgnoreCase): AREA = "area" LINK = "link" CLUSTER = "cluster" @@ -53,7 +47,7 @@ class TableTemplateType(str, Enum): BINDING_CONSTRAINT = "binding constraint" -class AssetType(str, Enum): +class AssetType(EnumIgnoreCase): AC = "ac" DC = "dc" GAZ = "gaz" @@ -61,19 +55,19 @@ class AssetType(str, Enum): OTHER = "other" -class TransmissionCapacity(str, Enum): 
+class TransmissionCapacity(EnumIgnoreCase): INFINITE = "infinite" IGNORE = "ignore" ENABLED = "enabled" -class BindingConstraintType(str, Enum): +class BindingConstraintType(EnumIgnoreCase): HOURLY = "hourly" DAILY = "daily" WEEKLY = "weekly" -class BindingConstraintOperator(str, Enum): +class BindingConstraintOperator(EnumIgnoreCase): LESS = "less" GREATER = "greater" BOTH = "both" diff --git a/antarest/study/business/timeseries_config_management.py b/antarest/study/business/timeseries_config_management.py index 9fa59ad33b..4c0dd48c86 100644 --- a/antarest/study/business/timeseries_config_management.py +++ b/antarest/study/business/timeseries_config_management.py @@ -1,18 +1,14 @@ from enum import Enum -from typing import Dict, Optional, List +from typing import Dict, List, Optional -from pydantic import ( - root_validator, - validator, - StrictBool, - StrictInt, -) +from pydantic import StrictBool, StrictInt, root_validator, validator from antarest.core.model import JSON +from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import ( - execute_or_add_commands, - FormFieldsBaseModel, GENERAL_DATA_PATH, + FormFieldsBaseModel, + execute_or_add_commands, ) from antarest.study.model import Study from antarest.study.storage.rawstudy.model.filesystem.config.model import ( @@ -25,7 +21,7 @@ ) -class TSType(str, Enum): +class TSType(EnumIgnoreCase): LOAD = "load" HYDRO = "hydro" THERMAL = "thermal" @@ -35,7 +31,7 @@ class TSType(str, Enum): NTC = "ntc" -class SeasonCorrelation(str, Enum): +class SeasonCorrelation(EnumIgnoreCase): MONTHLY = "monthly" ANNUAL = "annual" diff --git a/antarest/study/business/xpansion_management.py b/antarest/study/business/xpansion_management.py index d18159df5d..c6e391c249 100644 --- a/antarest/study/business/xpansion_management.py +++ b/antarest/study/business/xpansion_management.py @@ -3,15 +3,16 @@ from enum import Enum from http import HTTPStatus from io import BytesIO -from typing import 
Optional, Union, List, cast -from zipfile import ZipFile, BadZipFile +from typing import List, Optional, Union, cast +from zipfile import BadZipFile, ZipFile from fastapi import HTTPException, UploadFile -from pydantic import Field, BaseModel, validator +from pydantic import BaseModel, Field, validator from antarest.core.exceptions import BadZipBinary from antarest.core.model import JSON from antarest.core.utils.utils import suppress_exception +from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.model import Study from antarest.study.storage.rawstudy.model.filesystem.bucket_node import ( BucketNode, @@ -30,35 +31,35 @@ logger = logging.getLogger(__name__) -class XpansionResourceFileType(str, Enum): +class XpansionResourceFileType(EnumIgnoreCase): CAPACITIES = "capacities" WEIGHTS = "weights" CONSTRAINTS = "constraints" -class UcType(str, Enum): +class UcType(EnumIgnoreCase): EXPANSION_FAST = "expansion_fast" EXPANSION_ACCURATE = "expansion_accurate" -class Master(str, Enum): +class Master(EnumIgnoreCase): INTEGER = "integer" RELAXED = "relaxed" -class CutType(str, Enum): +class CutType(EnumIgnoreCase): AVERAGE = "average" YEARLY = "yearly" WEEKLY = "weekly" -class Solver(str, Enum): +class Solver(EnumIgnoreCase): CBC = "Cbc" COIN = "Coin" XPRESS = "Xpress" -class MaxIteration(str, Enum): +class MaxIteration(EnumIgnoreCase): INF = "+Inf" diff --git a/antarest/study/service.py b/antarest/study/service.py index 333ce6df41..9e0c7ab746 100644 --- a/antarest/study/service.py +++ b/antarest/study/service.py @@ -52,9 +52,7 @@ from antarest.core.utils.utils import StopWatch from antarest.login.model import Group from antarest.login.service import LoginService -from antarest.matrixstore.matrix_editor import ( - MatrixEditInstruction, -) +from antarest.matrixstore.matrix_editor import MatrixEditInstruction from antarest.study.business.adequacy_patch_management import ( AdequacyPatchManager, ) @@ -69,16 +67,18 @@ AreaType, AreaUI, ) +from 
antarest.study.business.areas.hydro_management import HydroManager from antarest.study.business.areas.properties_management import ( PropertiesManager, ) +from antarest.study.business.areas.renewable_management import RenewableManager +from antarest.study.business.areas.thermal_management import ThermalManager from antarest.study.business.binding_constraint_management import ( BindingConstraintManager, ) from antarest.study.business.config_management import ConfigManager from antarest.study.business.district_manager import DistrictManager from antarest.study.business.general_management import GeneralManager -from antarest.study.business.areas.hydro_management import HydroManager from antarest.study.business.link_management import LinkInfoDTO, LinkManager from antarest.study.business.matrix_management import ( MatrixManager, @@ -86,7 +86,6 @@ ) from antarest.study.business.optimization_management import OptimizationManager from antarest.study.business.playlist_management import PlaylistManager -from antarest.study.business.areas.renewable_management import RenewableManager from antarest.study.business.scenario_builder_management import ( ScenarioBuilderManager, ) @@ -94,7 +93,6 @@ from antarest.study.business.thematic_trimming_management import ( ThematicTrimmingManager, ) -from antarest.study.business.areas.thermal_management import ThermalManager from antarest.study.business.timeseries_config_management import ( TimeSeriesConfigManager, ) @@ -529,16 +527,6 @@ def edit_comments( else: raise StudyTypeUnsupported(study.id, study.type) - def _get_study_metadatas(self, params: RequestParameters) -> List[Study]: - return list( - filter( - lambda study: assert_permission( - params.user, study, StudyPermissionType.READ, raising=False - ), - self.repository.get_all(), - ) - ) - def get_studies_information( self, managed: bool, diff --git a/antarest/study/storage/variantstudy/variant_study_service.py b/antarest/study/storage/variantstudy/variant_study_service.py index 
f1b615aa31..6d7bf6640b 100644 --- a/antarest/study/storage/variantstudy/variant_study_service.py +++ b/antarest/study/storage/variantstudy/variant_study_service.py @@ -742,7 +742,7 @@ def generate_study_config( if variant_study.parent_id is None: raise NoParentStudyError(variant_study_id) - return self._generate_study_config(variant_study, None) + return self._generate_study_config(variant_study, variant_study, None) def _generate( self, @@ -897,7 +897,10 @@ def _generate( return results def _generate_study_config( - self, metadata: VariantStudy, config: Optional[FileStudyTreeConfig] + self, + original_study: VariantStudy, + metadata: VariantStudy, + config: Optional[FileStudyTreeConfig], ) -> Tuple[GenerationResultInfoDTO, FileStudyTreeConfig]: parent_study = self.repository.get(metadata.parent_id) if parent_study is None: @@ -907,13 +910,13 @@ def _generate_study_config( study = self.study_factory.create_from_fs( self.raw_study_service.get_study_path(parent_study), parent_study.id, - output_path=Path(metadata.path) / OUTPUT_RELATIVE_PATH, + output_path=Path(original_study.path) / OUTPUT_RELATIVE_PATH, use_cache=False, ) parent_config = study.config else: res, parent_config = self._generate_study_config( - parent_study, config + original_study, parent_study, config ) if res is not None and not res.success: return res, parent_config diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 590269dfc5..5628bd95e3 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -1,6 +1,40 @@ Antares Web Changelog ===================== +v2.14.4 (2023-06-28) +-------------------- + +### Bug Fixes + +* **launcher:** take into account the `nb_cpu` in the local Solver command line (#1603) ([7bb4f0c](https://github.com/AntaresSimulatorTeam/AntaREST/commit/7bb4f0c45db8ddbaedc1a814d0bfddb9fb440aba)) +* **api:** resolve version display issue in Desktop's `/version` endpoint (#1605) 
([a0bf966](https://github.com/AntaresSimulatorTeam/AntaREST/commit/a0bf966dc0b7a0ee302b7d25ff0d95f5307d8117)) +* **study:** fixing case sensitivity issues in reading study configuration (#1610) ([f03ad59](https://github.com/AntaresSimulatorTeam/AntaREST/commit/f03ad59f41a4d5a29a088e7ff98d20037540563b)) +* **api:** correct `/kill` end-point implementation to work with PyInstaller ([213fb88](https://github.com/AntaresSimulatorTeam/AntaREST/commit/213fb885b05490afe573938ec4300f07b561b2dd)) +* **fastapi:** correct URL inconsistency between the webapp and the API (#1612) ([195d22c](https://github.com/AntaresSimulatorTeam/AntaREST/commit/195d22c7005e2abad7f389164b0701a8fa24b98c)) +* **i18n:** wrong translations and add missing keys (#1615) ([7a7019c](https://github.com/AntaresSimulatorTeam/AntaREST/commit/7a7019cc1e900feaa5681d2244a81550510e9a78)) +* **deploy:** change example study settings to allow parallel run (#1617) ([389793e](https://github.com/AntaresSimulatorTeam/AntaREST/commit/389793e08dee0f05dfe68d952e9b85b64b3bc57e)) +* **variant:** get synthesis now also works for level 2+ variants (#1622) ([661b856](https://github.com/AntaresSimulatorTeam/AntaREST/commit/661b856331673ac792fd2ca264d0fb45433d3ee5)) +* **results:** refresh study outputs when job completed and add back button (#1621) ([39846c0](https://github.com/AntaresSimulatorTeam/AntaREST/commit/39846c07db0ccd540fcf73fe8a5d711012101226)) +* **deploy:** remove unnecessary Outputs from "000 Free Data Sample" study (#1628) ([a154dac](https://github.com/AntaresSimulatorTeam/AntaREST/commit/a154dacdc11e99a38cbc2d2930c50875563b76a2)) + + +### Documentation + +* **model:** add documentation to the `Matrix` and `MatrixDataSet` classes ([f7ae5f4](https://github.com/AntaresSimulatorTeam/AntaREST/commit/f7ae5f4735eb4add02c8aa951eaf30405748dfe6)) + + +### Code Refactoring + +* **api:** remove unused protected function 
([6ea3ad7](https://github.com/AntaresSimulatorTeam/AntaREST/commit/6ea3ad7208fd16746bd134aebf8ed8ea9b3da61d)) + + +### Contributors + +laurent-laporte-pro, +MartinBelthle, +skamril + + v2.14.3 (2023-06-20) -------------------- diff --git a/resources/deploy/config.yaml b/resources/deploy/config.yaml index e839384ebc..48cea48a22 100644 --- a/resources/deploy/config.yaml +++ b/resources/deploy/config.yaml @@ -47,16 +47,13 @@ launcher: # slurm_script_path: /path/to/launchantares_v1.1.3.sh # db_primary_key: name # antares_versions_on_remote_server : -# - "610" -# - "700" -# - "710" -# - "720" -# - "800" +# - "840" +# - "850" debug: false -root_path: "" +root_path: "api" #tasks: # max_workers: 5 @@ -64,7 +61,7 @@ server: worker_threadpool_size: 12 services: - watcher - + logging: level: INFO logfile: ./tmp/antarest.log diff --git a/resources/deploy/examples/studies/example_study.zip b/resources/deploy/examples/studies/example_study.zip index 3061ef8fb6..1a5401221e 100644 Binary files a/resources/deploy/examples/studies/example_study.zip and b/resources/deploy/examples/studies/example_study.zip differ diff --git a/scripts/update_version.py b/scripts/update_version.py index 9c982bb830..b3cfd253df 100755 --- a/scripts/update_version.py +++ b/scripts/update_version.py @@ -7,6 +7,7 @@ import datetime import pathlib import re +import typing try: from antarest import __version__ @@ -18,6 +19,84 @@ PROJECT_DIR = next(iter(p for p in HERE.parents if p.joinpath("antarest").exists())) # fmt: on +TOKENS = [ + ("H1", r"^([^\n]+)\n={3,}$"), + ("H2", r"^([^\n]+)\n-{3,}$"), + ("H3", r"^#{3}\s+([^\n]+)$"), + ("H4", r"^#{4}\s+([^\n]+)$"), + ("LINE", r"^[^\n]+$"), + ("NEWLINE", r"\n"), + ("MISMATCH", r"."), +] + +ANY_TOKEN_RE = "|".join([f"(?P<{name}>{regex})" for name, regex in TOKENS]) + + +class Token: + def __init__(self, kind: str, text: str) -> None: + self.kind = kind + self.text = text + + def __str__(self) -> str: + return self.text + + +class NewlineToken(Token): + def 
__init__(self) -> None: + super().__init__("NEWLINE", "\n") + + +class TitleToken(Token): + def __init__(self, kind: str, text: str) -> None: + super().__init__(kind, text) + + @property + def level(self) -> int: + return int(self.kind[1:]) + + def __str__(self) -> str: + title = self.text.strip() + if self.level == 1: + return "\n".join([title, "=" * len(title)]) + elif self.level == 2: + return "\n".join([title, "-" * len(title)]) + else: + return "#" * self.level + " " + title + + +def parse_changelog(change_log: str) -> typing.Generator[Token, None, None]: + for mo in re.finditer(ANY_TOKEN_RE, change_log, flags=re.MULTILINE): + kind = mo.lastgroup + if kind in {"H1", "H2", "H3", "H4"} and mo.lastindex is not None: + title = mo[mo.lastindex + 1] + yield TitleToken(kind, title) + elif kind == "LINE": + yield Token(kind, mo.group()) + elif kind == "NEWLINE": + yield NewlineToken() + else: + raise NotImplementedError(kind, mo.group()) + + +def update_changelog( + change_log: str, new_version: str, new_date: str +) -> typing.Generator[Token, None, None]: + title_found = False + new_title = f"v{new_version} ({new_date})" + for token in parse_changelog(change_log): + if ( + not title_found + and isinstance(token, TitleToken) + and token.level == 2 + ): + title_found = True + if token.text != new_title: + yield TitleToken(kind=token.kind, text=new_title) + yield NewlineToken() + yield NewlineToken() + yield NewlineToken() + yield token + def upgrade_version(new_version: str, new_date: str) -> None: """ @@ -77,6 +156,13 @@ def upgrade_version(new_version: str, new_date: str) -> None: patched = re.sub(search, replace, text, count=1) fullpath.write_text(patched, encoding="utf-8") + print("Preparing the CHANGELOG in the documentation...") + changelog_path = PROJECT_DIR.joinpath("docs/CHANGELOG.md") + change_log = changelog_path.read_text(encoding="utf-8") + with changelog_path.open(mode="w", encoding="utf-8") as fd: + for token in update_changelog(change_log, new_version, 
new_date): + print(token, end="", file=fd) + print("The version has been successfully updated.") diff --git a/setup.py b/setup.py index fd06118f12..610a0ca972 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ setuptools.setup( name="AntaREST", - version="2.14.3", + version="2.14.4", description="Antares Server", long_description=long_description, long_description_content_type="text/markdown", diff --git a/sonar-project.properties b/sonar-project.properties index 5f2a0be80f..2e78a84c9b 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -6,5 +6,5 @@ sonar.exclusions=antarest/gui.py,antarest/main.py sonar.python.coverage.reportPaths=coverage.xml sonar.python.version=3.8 sonar.javascript.lcov.reportPaths=webapp/coverage/lcov.info -sonar.projectVersion=2.14.3 +sonar.projectVersion=2.14.4 sonar.coverage.exclusions=antarest/gui.py,antarest/main.py,antarest/singleton_services.py,antarest/worker/archive_worker_service.py,webapp/**/* \ No newline at end of file diff --git a/tests/core/test_version_info.py b/tests/core/test_version_info.py index 195280631d..2ed7c82056 100644 --- a/tests/core/test_version_info.py +++ b/tests/core/test_version_info.py @@ -22,14 +22,18 @@ def test_get_dependencies(self) -> None: @pytest.mark.unit_test def test_get_commit_id__commit_id__exist(self, tmp_path) -> None: + # fmt: off path_commit_id = tmp_path.joinpath("commit_id") - path_commit_id.write_text("fake_commit") - assert get_commit_id(tmp_path) == "fake_commit" + path_commit_id.write_text("6d891aba6e4a1c3a6f43b8ca00b021a20d319091") + assert (get_commit_id(tmp_path) == "6d891aba6e4a1c3a6f43b8ca00b021a20d319091") + # fmt: on @pytest.mark.unit_test - def test_get_commit_id__commit_id__missing(self, tmp_path) -> None: - with patch( - "antarest.core.version_info.get_last_commit_from_git", - return_value="mock commit", - ): - assert get_commit_id(tmp_path) == "mock commit" + def test_get_commit_id__git_call_ok(self, tmp_path) -> None: + actual = get_commit_id(tmp_path) + 
assert re.fullmatch(r"[0-9a-fA-F]{40}", actual) + + @pytest.mark.unit_test + def test_get_commit_id__git_call_failed(self, tmp_path) -> None: + with patch("subprocess.check_output", side_effect=FileNotFoundError): + assert not get_commit_id(tmp_path) diff --git a/tests/integration/test_core_blueprint.py b/tests/integration/test_core_blueprint.py index 60949ea8c9..f1e94baa23 100644 --- a/tests/integration/test_core_blueprint.py +++ b/tests/integration/test_core_blueprint.py @@ -1,3 +1,4 @@ +import http import re from unittest import mock @@ -35,3 +36,27 @@ def test_version_info(self, app: FastAPI): "dependencies": mock.ANY, } assert actual == expected + + +class TestKillWorker: + def test_kill_worker__not_granted(self, app: FastAPI): + client = TestClient(app, raise_server_exceptions=False) + res = client.get("/kill") + assert res.status_code == http.HTTPStatus.UNAUTHORIZED, res.json() + assert res.json() == {"detail": "Missing cookie access_token_cookie"} + + def test_kill_worker__nominal_case(self, app: FastAPI): + client = TestClient(app, raise_server_exceptions=False) + # login as "admin" + res = client.post( + "/v1/login", json={"username": "admin", "password": "admin"} + ) + res.raise_for_status() + credentials = res.json() + admin_access_token = credentials["access_token"] + # kill the worker + res = client.get( + "/kill", headers={"Authorization": f"Bearer {admin_access_token}"} + ) + assert res.status_code == 500, res.json() + assert not res.content diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index 3c29e968c5..89ed597c4f 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -2405,6 +2405,23 @@ def test_variant_manager(app: FastAPI): }, ) variant_id = res.json() + + client.post( + f"/v1/launcher/run/{variant_id}", + headers={ + "Authorization": f'Bearer {admin_credentials["access_token"]}' + }, + ) + + res = client.get( + f"v1/studies/{variant_id}/synthesis", + 
headers={ + "Authorization": f'Bearer {admin_credentials["access_token"]}' + }, + ) + + assert variant_id in res.json()["output_path"] + client.post( f"/v1/studies/{variant_id}/variants?name=bar", headers={ diff --git a/tests/launcher/test_local_launcher.py b/tests/launcher/test_local_launcher.py index 86dde806b5..77c576d269 100644 --- a/tests/launcher/test_local_launcher.py +++ b/tests/launcher/test_local_launcher.py @@ -12,7 +12,7 @@ from antarest.launcher.adapters.local_launcher.local_launcher import ( LocalLauncher, ) -from antarest.launcher.model import JobStatus +from antarest.launcher.model import JobStatus, LauncherParametersDTO from sqlalchemy import create_engine @@ -75,12 +75,23 @@ def test_compute(tmp_path: Path): str(uuid): ("study-id", tmp_path / "run", Mock()) } local_launcher.callbacks.import_output.return_value = "some output" - # noinspection PyTypeChecker + launcher_parameters = LauncherParametersDTO( + adequacy_patch=None, + nb_cpu=8, + post_processing=False, + time_limit=3600, + xpansion=False, + xpansion_r_version=False, + archive_output=False, + auto_unzip=True, + output_suffix="", + other_options="", + ) local_launcher._compute( antares_solver_path=solver_path, study_uuid="study-id", uuid=uuid, - launcher_parameters=None, + launcher_parameters=launcher_parameters, ) # noinspection PyUnresolvedReferences diff --git a/tests/storage/test_service.py b/tests/storage/test_service.py index e5d38e2e82..36ed78e90d 100644 --- a/tests/storage/test_service.py +++ b/tests/storage/test_service.py @@ -8,7 +8,6 @@ from uuid import uuid4 import pytest - from antarest.core.config import Config, StorageConfig, WorkspaceConfig from antarest.core.exceptions import TaskAlreadyRunning from antarest.core.filetransfer.model import FileDownload, FileDownloadTaskDTO @@ -110,35 +109,6 @@ def build_study_service( ) -# noinspection PyArgumentList -@pytest.mark.unit_test -def test_get_studies_uuid() -> None: - bob = User(id=2, name="bob") - alice = User(id=3, 
name="alice") - - a = Study(id="A", owner=bob) - b = Study(id="B", owner=alice) - c = Study(id="C", owner=bob) - - # Mock - repository = Mock() - repository.get_all.return_value = [a, b, c] - - study_service = Mock() - config = Config( - storage=StorageConfig( - workspaces={DEFAULT_WORKSPACE_NAME: WorkspaceConfig()} - ) - ) - service = build_study_service(study_service, repository, config) - - studies = service._get_study_metadatas( - RequestParameters(user=JWTUser(id=2, impersonator=2, type="users")) - ) - - assert [a, c] == studies - - def study_to_dto(study: Study) -> StudyMetadataDTO: return StudyMetadataDTO( id=study.id, diff --git a/webapp/package-lock.json b/webapp/package-lock.json index b98224964d..8d81a6e4be 100644 --- a/webapp/package-lock.json +++ b/webapp/package-lock.json @@ -1,6 +1,6 @@ { "name": "antares-web", - "version": "2.14.3", + "version": "2.14.4", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/webapp/package.json b/webapp/package.json index 299cb2f497..f4e5c78a1f 100644 --- a/webapp/package.json +++ b/webapp/package.json @@ -1,6 +1,6 @@ { "name": "antares-web", - "version": "2.14.3", + "version": "2.14.4", "private": true, "dependencies": { "@emotion/react": "11.10.6", diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index 00be638824..39aa53a032 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -238,6 +238,9 @@ "study.results": "Results", "study.copyId": "Copy the study ID", "study.copyJobId": "Copy the job ID", + "study.outputFilters": "Output print status", + "study.outputFilters.filterByYear": "Output year by year", + "study.outputFilters.filterSynthesis": "Synthesis outputs", "study.modelization.links.hurdleCost": "Hurdle costs", "study.modelization.links.loopFlows": "Loop flows", "study.modelization.links.pst": "PST", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index 94646e62e2..b11dd4bb84 100644 
--- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -238,14 +238,17 @@ "study.results": "Résultats", "study.copyId": "Copier l'identifiant de l'étude", "study.copyJobId": "Copier l'identifiant de la tâche", + "study.outputFilters": "Affichage des sorties", + "study.outputFilters.filterByYear": "Sorties année par année", + "study.outputFilters.filterSynthesis": "Sorties de la synthèse", "study.modelization.links.hurdleCost": "Hurdle costs", "study.modelization.links.loopFlows": "Loop flows", "study.modelization.links.pst": "PST", "study.modelization.links.type": "Type", "study.modelization.links.transmissionCapa": "Capacités de transmission", - "study.modelization.links.transmissionCapa.infinite": "Infinite", - "study.modelization.links.transmissionCapa.ignore": "Null", - "study.modelization.links.transmissionCapa.enabled": "Activé", + "study.modelization.links.transmissionCapa.infinite": "Infinie", + "study.modelization.links.transmissionCapa.ignore": "Nulle", + "study.modelization.links.transmissionCapa.enabled": "Activée", "study.modelization.links.type.ac": "AC", "study.modelization.links.type.dc": "DC", "study.modelization.links.type.gaz": "Gas", diff --git a/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx b/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx index 779823f1f6..abc29e9b4b 100644 --- a/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx @@ -8,7 +8,7 @@ import { } from "@mui/material"; import { useEffect, useMemo, useState } from "react"; import { useTranslation } from "react-i18next"; -import { useOutletContext, useParams } from "react-router"; +import { useNavigate, useOutletContext, useParams } from "react-router"; import axios from "axios"; import GridOffIcon from "@mui/icons-material/GridOff"; import DownloadOutlinedIcon from 
"@mui/icons-material/DownloadOutlined"; @@ -38,6 +38,7 @@ import UsePromiseCond, { } from "../../../../../common/utils/UsePromiseCond"; import useStudySynthesis from "../../../../../../redux/hooks/useStudySynthesis"; import { downloadMatrix } from "../../../../../../utils/matrixUtils"; +import ButtonBack from "../../../../../common/ButtonBack"; function ResultDetails() { const { study } = useOutletContext<{ study: StudyMetadata }>(); @@ -57,6 +58,7 @@ function ResultDetails() { const [selectedItemId, setSelectedItemId] = useState(""); const [searchValue, setSearchValue] = useState(""); const { t } = useTranslation(); + const navigate = useNavigate(); const items = useAppSelector((state) => itemType === OutputItemType.Areas @@ -152,6 +154,16 @@ function ResultDetails() { + navigate("..")} /> + + } mainContent={ <> (props: Props) { name={filterName as FieldPath} multiple options={filterOptions} - label={t(`study.modelization.nodeProperties.${filterName}`)} + label={t(`study.outputFilters.${filterName}`)} control={control} rules={{ onAutoSubmit: (value) => { @@ -48,7 +48,7 @@ function OutputFilters(props: Props) { ); return ( -
+
{renderFilter("filterSynthesis")} {renderFilter("filterByYear")}
diff --git a/webapp/src/components/common/ButtonBack.tsx b/webapp/src/components/common/ButtonBack.tsx index 6ba10a0411..0af4de5418 100644 --- a/webapp/src/components/common/ButtonBack.tsx +++ b/webapp/src/components/common/ButtonBack.tsx @@ -19,12 +19,12 @@ function ButtonBack(props: Props) { alignItems="center" boxSizing="border-box" > - } + variant="text" color="secondary" onClick={() => onClick()} - sx={{ cursor: "pointer" }} - /> - diff --git a/webapp/src/components/common/PropertiesView.tsx b/webapp/src/components/common/PropertiesView.tsx index 722db288b4..4fd90bb0b3 100644 --- a/webapp/src/components/common/PropertiesView.tsx +++ b/webapp/src/components/common/PropertiesView.tsx @@ -5,6 +5,7 @@ import SearchFE from "./fieldEditors/SearchFE"; import { mergeSxProp } from "../../utils/muiUtils"; interface PropsType { + topContent?: ReactNode; mainContent: ReactNode | undefined; secondaryContent?: ReactNode; onSearchFilterChange?: (value: string) => void; @@ -13,8 +14,14 @@ interface PropsType { } function PropertiesView(props: PropsType) { - const { onAdd, onSearchFilterChange, mainContent, secondaryContent, sx } = - props; + const { + onAdd, + onSearchFilterChange, + topContent, + mainContent, + secondaryContent, + sx, + } = props; return ( + {topContent} {onSearchFilterChange && ( )} diff --git a/webapp/src/redux/ducks/studySyntheses.ts b/webapp/src/redux/ducks/studySyntheses.ts index e8c470ca22..f086602c65 100644 --- a/webapp/src/redux/ducks/studySyntheses.ts +++ b/webapp/src/redux/ducks/studySyntheses.ts @@ -8,6 +8,7 @@ import * as RA from "ramda-adjunct"; import { FileStudyTreeConfigDTO, GenericInfo, + LaunchJobDTO, Link, LinkElement, WSMessage, @@ -139,10 +140,12 @@ export const setStudySynthesis = createAsyncThunk< }); export const refreshStudySynthesis = - (event: WSMessage): AppThunk => + (event: WSMessage): AppThunk => (dispatch, getState) => { const state = getState(); - const { id } = event.payload; + const id = + "study_id" in event.payload ? 
event.payload.study_id : event.payload.id; + if (getStudySynthesisIds(state).includes(id)) { dispatch(setStudySynthesis(id as string)); diff --git a/webapp/src/services/webSockets.ts b/webapp/src/services/webSockets.ts index d09f9a0d3b..a220bd9a46 100644 --- a/webapp/src/services/webSockets.ts +++ b/webapp/src/services/webSockets.ts @@ -2,6 +2,7 @@ import debug from "debug"; import * as RA from "ramda-adjunct"; import { GenericInfo, + LaunchJobDTO, StudySummary, UserInfo, WSEvent, @@ -60,6 +61,7 @@ export function initWebSocket( if (!globalListenerAdded) { messageListeners.push( makeStudyListener(dispatch), + makeStudyJobStatusListener(dispatch), makeMaintenanceListener(dispatch), makeStudyDataListener(dispatch) ); @@ -202,6 +204,26 @@ function makeStudyListener(dispatch: AppDispatch) { }; } +function makeStudyJobStatusListener(dispatch: AppDispatch): MessageListener { + const unsubscribeById: Record = {}; + + return function listener(e: WSMessage): void { + switch (e.type) { + case WSEvent.STUDY_JOB_STARTED: { + const unsubscribe = sendWsSubscribeMessage( + WsChannel.JobStatus + e.payload.id + ); + unsubscribeById[e.payload.id] = unsubscribe; + break; + } + case WSEvent.STUDY_JOB_COMPLETED: + unsubscribeById[e.payload.id]?.(); + dispatch(refreshStudySynthesis(e)); + break; + } + }; +} + function makeStudyDataListener(dispatch: AppDispatch) { return function listener(e: WSMessage): void { switch (e.type) {