Skip to content

Commit

Permalink
Merge branch 'release/2.15.2' into dev
Browse files Browse the repository at this point in the history
# Conflicts:
#	antarest/study/storage/rawstudy/model/filesystem/root/input/bindingconstraints/bindingcontraints.py
#	antarest/study/storage/variantstudy/model/command/update_binding_constraint.py
#	tests/variantstudy/model/command/test_manage_binding_constraints.py
  • Loading branch information
laurent-laporte-pro committed Oct 11, 2023
2 parents bb6d5c6 + b7ba8b4 commit 1de036a
Show file tree
Hide file tree
Showing 37 changed files with 511 additions and 323 deletions.
4 changes: 2 additions & 2 deletions antarest/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@

# Standard project metadata

__version__ = "2.15.1"
__version__ = "2.15.2"
__author__ = "RTE, Antares Web Team"
__date__ = "2023-10-05"
__date__ = "2023-10-11"
# noinspection SpellCheckingInspection
__credits__ = "(c) Réseau de Transport de l’Électricité (RTE)"

Expand Down
1 change: 1 addition & 0 deletions antarest/core/cache/business/redis_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ def __init__(self, redis_client: Redis): # type: ignore
self.redis = redis_client

def start(self) -> None:
    """No-op lifecycle hook: nothing to launch for this cache backend."""
    # Assuming the Redis service is already running; no need to start it here.
    pass

def put(self, id: str, data: JSON, duration: int = 3600) -> None:
Expand Down
13 changes: 10 additions & 3 deletions antarest/core/filetransfer/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,15 +13,15 @@ class FileDownloadNotFound(HTTPException):
def __init__(self) -> None:
super().__init__(
HTTPStatus.NOT_FOUND,
f"Requested download file was not found. It must have expired",
"Requested download file was not found. It must have expired",
)


class FileDownloadNotReady(HTTPException):
def __init__(self) -> None:
super().__init__(
HTTPStatus.NOT_ACCEPTABLE,
f"Requested file is not ready for download.",
"Requested file is not ready for download.",
)


Expand Down Expand Up @@ -70,4 +70,11 @@ def to_dto(self) -> FileDownloadDTO:
)

def __repr__(self) -> str:
return f"(id={self.id},name={self.name},filename={self.filename},path={self.path},ready={self.ready},expiration_date={self.expiration_date})"
return (
f"(id={self.id},"
f" name={self.name},"
f" filename={self.filename},"
f" path={self.path},"
f" ready={self.ready},"
f" expiration_date={self.expiration_date})"
)
8 changes: 5 additions & 3 deletions antarest/core/logging/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,12 +124,14 @@ def configure_logger(config: Config, handler_cls: str = "logging.FileHandler") -
"filters": ["context"],
}
elif handler_cls == "logging.handlers.TimedRotatingFileHandler":
# 90 days = 3 months
# keep only 1 backup (0 means keep all)
logging_config["handlers"]["default"] = {
"class": handler_cls,
"filename": config.logging.logfile,
"when": "D", # D = day
"interval": 90, # 90 days = 3 months
"backupCount": 1, # keep only 1 backup (0 means keep all)
"when": "D",
"interval": 90,
"backupCount": 1,
"encoding": "utf-8",
"delay": False,
"utc": False,
Expand Down
16 changes: 10 additions & 6 deletions antarest/core/tasks/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ def await_task(self, task_id: str, timeout_sec: Optional[int] = None) -> None:

# noinspection PyUnusedLocal
def noop_notifier(message: str) -> None:
pass
"""This function is used in tasks when no notification is required."""


DEFAULT_AWAIT_MAX_TIMEOUT = 172800
Expand Down Expand Up @@ -121,7 +121,7 @@ async def _await_task_end(event: Event) -> None:

return _await_task_end

# todo: Is `logger_` parameter required? (consider refactoring)
# noinspection PyUnusedLocal
def _send_worker_task(logger_: TaskUpdateNotifier) -> TaskResult:
listener_id = self.event_bus.add_listener(
_create_awaiter(task_result_wrapper),
Expand Down Expand Up @@ -338,14 +338,18 @@ def _run_task(
result.message,
result.return_value,
)
event_type = {True: EventType.TASK_COMPLETED, False: EventType.TASK_FAILED}[result.success]
event_msg = {True: "completed", False: "failed"}[result.success]
self.event_bus.push(
Event(
type=EventType.TASK_COMPLETED if result.success else EventType.TASK_FAILED,
type=event_type,
payload=TaskEventPayload(
id=task_id,
message=custom_event_messages.end
if custom_event_messages is not None
else f'Task {task_id} {"completed" if result.success else "failed"}',
message=(
custom_event_messages.end
if custom_event_messages is not None
else f"Task {task_id} {event_msg}"
),
).dict(),
permissions=PermissionInfo(public_mode=PublicMode.READ),
channel=EventChannelDirectory.TASK + task_id,
Expand Down
9 changes: 5 additions & 4 deletions antarest/launcher/adapters/local_launcher/local_launcher.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import io
import logging
import shutil
import signal
Expand All @@ -6,15 +7,15 @@
import threading
import time
from pathlib import Path
from typing import IO, Callable, Dict, Optional, Tuple, cast
from typing import Callable, Dict, Optional, Tuple, cast
from uuid import UUID

from antarest.core.config import Config
from antarest.core.interfaces.cache import ICache
from antarest.core.interfaces.eventbus import IEventBus
from antarest.core.requests import RequestParameters
from antarest.launcher.adapters.abstractlauncher import AbstractLauncher, LauncherCallbacks, LauncherInitException
from antarest.launcher.adapters.log_manager import LogTailManager
from antarest.launcher.adapters.log_manager import follow
from antarest.launcher.model import JobStatus, LauncherParametersDTO, LogType

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -133,8 +134,8 @@ def stop_reading_output() -> bool:
)

thread = threading.Thread(
target=lambda: LogTailManager.follow(
cast(IO[str], process.stdout),
target=lambda: follow(
cast(io.StringIO, process.stdout),
self.create_update_log(str(uuid)),
stop_reading_output,
None,
Expand Down
78 changes: 38 additions & 40 deletions antarest/launcher/adapters/log_manager.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
import contextlib
import io
import logging
import time
from pathlib import Path
from threading import Thread
from typing import IO, Callable, Dict, Optional
from typing import Callable, Dict, Optional, cast

logger = logging.getLogger(__name__)

Expand All @@ -11,7 +13,7 @@ class LogTailManager:
BATCH_SIZE = 10

def __init__(self, log_base_dir: Path) -> None:
logger.info(f"Initiating Log manager")
logger.info("Initiating Log manager")
self.log_base_dir = log_base_dir
self.tracked_logs: Dict[str, Thread] = {}

Expand Down Expand Up @@ -47,43 +49,6 @@ def stop_tracking(self, log_path: Optional[Path]) -> None:
if log_path_key in self.tracked_logs:
del self.tracked_logs[log_path_key]

@staticmethod
def follow(
io: IO[str],
handler: Callable[[str], None],
stop: Callable[[], bool],
log_file: Optional[str],
) -> None:
line = ""
line_count = 0

while True:
if stop():
break
tmp = io.readline()
if not tmp:
if line:
logger.debug(f"Calling handler for {log_file}")
try:
handler(line)
except Exception as e:
logger.error("Could not handle this log line", exc_info=e)
line = ""
line_count = 0
time.sleep(0.1)
else:
line += tmp
if line.endswith("\n"):
line_count += 1
if line_count >= LogTailManager.BATCH_SIZE:
logger.debug(f"Calling handler for {log_file}")
try:
handler(line)
except Exception as e:
logger.error("Could not handle this log line", exc_info=e)
line = ""
line_count = 0

def _follow(
self,
log_file: Optional[Path],
Expand All @@ -97,4 +62,37 @@ def _follow(

with open(log_file, "r") as fh:
logger.info(f"Scanning {log_file}")
LogTailManager.follow(fh, handler, stop, str(log_file))
follow(cast(io.StringIO, fh), handler, stop, str(log_file))


def follow(
    file: io.StringIO,
    handler: Callable[[str], None],
    stop: Callable[[], bool],
    log_file: Optional[str],
) -> None:
    """
    Tail a text stream and forward its content to a handler in batches.

    Lines read from `file` are accumulated in a buffer. The buffer is flushed
    to `handler` either when `LogTailManager.BATCH_SIZE` complete lines have
    been collected, or when the stream momentarily runs dry (in which case the
    loop sleeps briefly before polling again). Exceptions raised by `handler`
    are suppressed so that tailing continues.

    Args:
        file: Text stream to read from (polled with ``readline``).
        handler: Callback invoked with each flushed batch of text.
        stop: Polled at the top of every iteration; a truthy result ends the loop.
        log_file: Display name of the followed file, used only in debug logs.
    """
    buffer = ""
    buffered_lines = 0

    while not stop():
        chunk = file.readline()
        if not chunk:
            # Stream is (temporarily) exhausted: flush any pending text, then wait.
            if buffer:
                logger.debug(f"Calling handler for {log_file}")
                with contextlib.suppress(Exception):
                    handler(buffer)
                buffer = ""
                buffered_lines = 0
            time.sleep(0.1)
            continue
        buffer += chunk
        if buffer.endswith("\n"):
            buffered_lines += 1
            if buffered_lines >= LogTailManager.BATCH_SIZE:
                logger.debug(f"Calling handler for {log_file}")
                with contextlib.suppress(Exception):
                    handler(buffer)
                buffer = ""
                buffered_lines = 0
2 changes: 1 addition & 1 deletion antarest/launcher/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,7 +176,7 @@ def update(
channel=EventChannelDirectory.JOB_STATUS + job_result.id,
)
)
logger.info(f"Study status set")
logger.info("Study status set")

def append_log(self, job_id: str, message: str, log_type: JobLogType) -> None:
try:
Expand Down
4 changes: 2 additions & 2 deletions antarest/login/web.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ def users_get_all(
details: Optional[bool] = False,
current_user: JWTUser = Depends(auth.get_current_user),
) -> Any:
logger.info(f"Fetching users list", extra={"user": current_user.id})
logger.info("Fetching users list", extra={"user": current_user.id})
params = RequestParameters(user=current_user)
return service.get_all_users(params, details)

Expand Down Expand Up @@ -188,7 +188,7 @@ def groups_get_all(
details: Optional[bool] = False,
current_user: JWTUser = Depends(auth.get_current_user),
) -> Any:
logger.info(f"Fetching groups list", extra={"user": current_user.id})
logger.info("Fetching groups list", extra={"user": current_user.id})
params = RequestParameters(user=current_user)
return service.get_all_groups(params, details)

Expand Down
4 changes: 2 additions & 2 deletions antarest/matrixstore/web.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def create(
matrix: List[List[MatrixData]] = Body(description="matrix dto", default=[]),
current_user: JWTUser = Depends(auth.get_current_user),
) -> Any:
logger.info(f"Creating new matrix", extra={"user": current_user.id})
logger.info("Creating new matrix", extra={"user": current_user.id})
if current_user.id is not None:
return service.create(matrix)
raise UserHasNotPermissionError()
Expand All @@ -60,7 +60,7 @@ def create_by_importation(

@bp.get("/matrix/{id}", tags=[APITag.matrix], response_model=MatrixDTO)
def get(id: str, user: JWTUser = Depends(auth.get_current_user)) -> Any:
logger.info(f"Fetching matrix", extra={"user": user.id})
logger.info("Fetching matrix", extra={"user": user.id})
if user.id is not None:
return service.get(id)
raise UserHasNotPermissionError()
Expand Down
2 changes: 1 addition & 1 deletion antarest/study/business/xpansion_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -436,7 +436,7 @@ def _assert_candidate_is_correct(
xpansion_candidate_dto: XpansionCandidateDTO,
new_name: bool = False,
) -> None:
logger.info(f"Checking given candidate is correct")
logger.info("Checking given candidate is correct")
self._assert_no_illegal_character_is_in_candidate_name(xpansion_candidate_dto.name)
if new_name:
self._assert_candidate_name_is_not_already_taken(candidates, xpansion_candidate_dto.name)
Expand Down
17 changes: 11 additions & 6 deletions antarest/study/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -641,15 +641,20 @@ def create_study(

def get_user_name(self, params: RequestParameters) -> str:
"""
Args: params : Request parameters
Retrieves the name of a user based on the provided request parameters.
Returns: The user's name
Args:
params: The request parameters which includes user information.
Returns:
Returns the user's name or, if the logged user is a "bot"
(i.e., an application's token), it returns the token's author name.
"""
author = "Unknown"
if params.user:
if curr_user := self.user_service.get_user(params.user.id, params):
author = curr_user.to_dto().name
return author
user_id = params.user.impersonator if params.user.type == "bots" else params.user.id
if curr_user := self.user_service.get_user(user_id, params):
return curr_user.to_dto().name
return "Unknown"

def get_study_synthesis(self, study_id: str, params: RequestParameters) -> FileStudyTreeConfigDTO:
"""
Expand Down
Original file line number Diff line number Diff line change
@@ -1,19 +1,32 @@
from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency
from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode
from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix
from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixFrequency
from antarest.study.storage.rawstudy.model.filesystem.root.input.bindingconstraints.bindingconstraints_ini import (
BindingConstraintsIni,
)
from antarest.study.storage.variantstudy.business.matrix_constants.binding_constraint.series import (
default_binding_constraint_daily,
default_binding_constraint_hourly,
default_binding_constraint_weekly,
)


class BindingConstraints(FolderNode):
def build(self) -> TREE:
default_matrices = {
BindingConstraintFrequency.HOURLY: default_binding_constraint_hourly,
BindingConstraintFrequency.DAILY: default_binding_constraint_daily,
BindingConstraintFrequency.WEEKLY: default_binding_constraint_weekly,
}
children: TREE = {
binding.id: InputSeriesMatrix(
self.context,
self.config.next_file(f"{binding.id}.txt"),
MatrixFrequency(binding.time_step.value),
freq=MatrixFrequency(binding.time_step),
nb_columns=3,
default_empty=default_matrices[binding.time_step],
)
for binding in self.config.bindings
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1 +1 @@
from . import hydro, link, prepro, st_storage, thermals
from . import binding_constraint, hydro, link, prepro, st_storage, thermals
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
from . import series
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
import numpy as np


def _immutable_zeros(row_count: int) -> np.ndarray:
    """Return a read-only (row_count, 3) float64 matrix of zeros."""
    matrix = np.zeros((row_count, 3), dtype=np.float64)
    # Freeze the array so shared default matrices cannot be mutated by callers.
    matrix.flags.writeable = False
    return matrix


# Default (all-zero) binding-constraint series, one per time frequency:
# 8760 hours, 365 days, and 52 weeks in a year.
default_binding_constraint_hourly = _immutable_zeros(8760)
default_binding_constraint_daily = _immutable_zeros(365)
default_binding_constraint_weekly = _immutable_zeros(52)
Loading

0 comments on commit 1de036a

Please sign in to comment.