v2.14.6

New bugfix release on `master`.

laurent-laporte-pro authored Sep 1, 2023
2 parents 52e7fae + c0c968d, commit 7503059
Showing 416 changed files with 4,299 additions and 12,676 deletions.
44 changes: 24 additions & 20 deletions .github/workflows/main.yml
@@ -11,18 +11,22 @@ jobs:
       - name: Checkout github repo (+ download lfs dependencies)
         uses: actions/checkout@v2
       - name: Set up Python
-        uses: actions/setup-python@v1
+        uses: actions/setup-python@v2
         with:
           python-version: 3.8
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
           pip install -r requirements-dev.txt
+      - uses: isort/isort-action@master
+        with:
+          sort-paths: antarest, tests
+          requirementsFiles: "requirements-dev.txt"
       - name: Check with black
         uses: psf/black@stable
         with:
           # Version of Black should match the versions set in `requirements-dev.txt`
-          version: "~=23.1.0"
+          version: "~=23.7.0"
           options: --check --diff
       - name: Check Typing (mypy)
         #continue-on-error: true
@@ -34,13 +38,13 @@ jobs:
     strategy:
       max-parallel: 9
       matrix:
-        os: [windows-latest, ubuntu-20.04]
+        os: [ windows-latest, ubuntu-20.04 ]

     steps:
       - name: Checkout github repo (+ download lfs dependencies)
         uses: actions/checkout@v2
       - name: Set up Python
-        uses: actions/setup-python@v1
+        uses: actions/setup-python@v2
         with:
           python-version: 3.8
       - name: Install dependencies
@@ -98,20 +102,20 @@ jobs:

   sonarcloud:
     runs-on: ubuntu-20.04
-    needs: [python-test, npm-test]
+    needs: [ python-test, npm-test ]
     steps:
-      - uses: actions/checkout@v2
-      - name: Download python coverage report
-        uses: actions/download-artifact@v3
-        with:
-          name: python-code-coverage-report
-      - name: Download js coverage report
-        uses: actions/download-artifact@v3
-        with:
-          name: js-code-coverage-report
-          path: webapp/coverage
-      - name: SonarCloud Scan
-        uses: sonarsource/sonarcloud-github-action@master
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
+      - uses: actions/checkout@v2
+      - name: Download python coverage report
+        uses: actions/download-artifact@v3
+        with:
+          name: python-code-coverage-report
+      - name: Download js coverage report
+        uses: actions/download-artifact@v3
+        with:
+          name: js-code-coverage-report
+          path: webapp/coverage
+      - name: SonarCloud Scan
+        uses: sonarsource/sonarcloud-github-action@master
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
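The two lint steps added above can be reproduced locally before pushing. A minimal sketch, assuming isort and black are already installed from requirements-dev.txt; the helper script itself is hypothetical and not part of the repository:

# run_checks.py: hypothetical helper mirroring the isort and black CI steps.
import subprocess
import sys

CHECKS = [
    # Same paths as `sort-paths: antarest, tests` in the isort action.
    ["isort", "--check-only", "--diff", "antarest", "tests"],
    # Same `--check --diff` options passed to psf/black@stable.
    ["black", "--check", "--diff", "antarest", "tests"],
]

def main() -> int:
    for cmd in CHECKS:
        print("$", " ".join(cmd))
        if subprocess.run(cmd).returncode != 0:
            return 1  # fail fast, like the workflow job
    return 0

if __name__ == "__main__":
    sys.exit(main())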
2 changes: 1 addition & 1 deletion alembic.ini
@@ -51,7 +51,7 @@ sqlalchemy.url =
 # hooks=black
 # black.type=console_scripts
 # black.entrypoint=black
-# black.options=-l 79
+# black.options=-l 120

 # Logging configuration
 [loggers]
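The commented-out post-write hook now advertises the project's 120-character line length instead of Black's old 79 default. A sketch of what the hook would do if enabled, using Black's Python API; the migration source here is made up:

# Format a generated migration the way the `black -l 120` hook would.
import black

migration_src = (
    "def upgrade():\n"
    '    op.add_column("study", sa.Column("last_access", sa.DateTime(), nullable=True))\n'
)
print(black.format_str(migration_src, mode=black.Mode(line_length=120)))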
4 changes: 2 additions & 2 deletions antarest/__init__.py
@@ -7,9 +7,9 @@

 # Standard project metadata

-__version__ = "2.14.5"
+__version__ = "2.14.6"
 __author__ = "RTE, Antares Web Team"
-__date__ = "2023-08-11"
+__date__ = "2023-09-01"
 # noinspection SpellCheckingInspection
 __credits__ = "(c) Réseau de Transport de l’Électricité (RTE)"

16 changes: 5 additions & 11 deletions antarest/core/cache/business/local_chache.py
@@ -1,13 +1,13 @@
 import logging
 import threading
 import time
-from typing import Optional, Dict, List
+from typing import Dict, List, Optional

 from pydantic import BaseModel

 from antarest.core.config import CacheConfig
-from antarest.core.model import JSON
 from antarest.core.interfaces.cache import ICache
+from antarest.core.model import JSON

 logger = logging.getLogger(__name__)

@@ -44,9 +44,7 @@ def checker(self) -> None:
                 for id in to_delete:
                     del self.cache[id]

-    def put(
-        self, id: str, data: JSON, duration: int = 3600
-    ) -> None:  # Duration in second
+    def put(self, id: str, data: JSON, duration: int = 3600) -> None:  # Duration in second
         with self.lock:
             logger.info(f"Adding cache key {id}")
             self.cache[id] = LocalCacheElement(
@@ -55,19 +53,15 @@ def put(
                 duration=duration,
             )

-    def get(
-        self, id: str, refresh_duration: Optional[int] = None
-    ) -> Optional[JSON]:
+    def get(self, id: str, refresh_duration: Optional[int] = None) -> Optional[JSON]:
         res = None
         with self.lock:
             logger.info(f"Trying to retrieve cache key {id}")
             if id in self.cache:
                 logger.info(f"Cache key {id} found")
                 if refresh_duration:
                     self.cache[id].duration = refresh_duration
-                    self.cache[id].timeout = (
-                        int(time.time()) + self.cache[id].duration
-                    )
+                    self.cache[id].timeout = int(time.time()) + self.cache[id].duration
                 res = self.cache[id].data
         return res

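A usage sketch for the reformatted LocalCache API, assuming CacheConfig() is constructible with its defaults; the key and payload are made up:

from antarest.core.cache.business.local_chache import LocalCache
from antarest.core.config import CacheConfig

cache = LocalCache(config=CacheConfig())
cache.start()  # caches are started before use, as build_cache() does below

cache.put("study:42", {"name": "my-study"}, duration=60)  # expires after 60 s
data = cache.get("study:42", refresh_duration=120)  # a hit also resets the timeout
assert data == {"name": "my-study"}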
16 changes: 5 additions & 11 deletions antarest/core/cache/business/redis_cache.py
@@ -1,12 +1,12 @@
 import json
 import logging
-from typing import Optional, List
+from typing import List, Optional

 from pydantic import BaseModel
 from redis.client import Redis

-from antarest.core.model import JSON
 from antarest.core.interfaces.cache import ICache
+from antarest.core.model import JSON

 logger = logging.getLogger(__name__)

@@ -30,23 +30,17 @@ def put(self, id: str, data: JSON, duration: int = 3600) -> None:
         self.redis.set(redis_key, redis_element.json())
         self.redis.expire(redis_key, duration)

-    def get(
-        self, id: str, refresh_timeout: Optional[int] = None
-    ) -> Optional[JSON]:
+    def get(self, id: str, refresh_timeout: Optional[int] = None) -> Optional[JSON]:
         redis_key = f"cache:{id}"
         result = self.redis.get(redis_key)
         logger.info(f"Trying to retrieve cache key {id}")
         if result is not None:
             logger.info(f"Cache key {id} found")
             json_result = json.loads(result)
-            redis_element = RedisCacheElement(
-                duration=json_result["duration"], data=json_result["data"]
-            )
+            redis_element = RedisCacheElement(duration=json_result["duration"], data=json_result["data"])
             self.redis.expire(
                 redis_key,
-                redis_element.duration
-                if refresh_timeout is None
-                else refresh_timeout,
+                redis_element.duration if refresh_timeout is None else refresh_timeout,
             )
             return redis_element.data
         logger.info(f"Cache key {id} not found")
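The same sketch against the Redis backend, which stores entries under a "cache:" prefix and re-arms the TTL on every hit; the connection details are assumptions:

from redis.client import Redis

from antarest.core.cache.business.redis_cache import RedisCache

cache = RedisCache(Redis(host="localhost", port=6379))
cache.put("config:ui", {"theme": "dark"})          # key "cache:config:ui", TTL 3600 s
data = cache.get("config:ui")                      # TTL re-armed to the stored 3600 s
data = cache.get("config:ui", refresh_timeout=60)  # TTL re-armed to 60 s instead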
10 changes: 2 additions & 8 deletions antarest/core/cache/main.py
@@ -11,14 +11,8 @@
 logger = logging.getLogger(__name__)


-def build_cache(
-    config: Config, redis_client: Optional[Redis] = None  # type: ignore
-) -> ICache:
-    cache = (
-        RedisCache(redis_client)
-        if redis_client is not None
-        else LocalCache(config=config.cache)
-    )
+def build_cache(config: Config, redis_client: Optional[Redis] = None) -> ICache:  # type: ignore
+    cache = RedisCache(redis_client) if redis_client is not None else LocalCache(config=config.cache)
     logger.info("Redis cache" if config.redis is not None else "Local cache")
     cache.start()
     return cache
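How the factory is meant to be called, as a sketch: pass a Redis client to get a RedisCache, or omit it to fall back to a LocalCache built from config.cache. Note that the chosen backend depends on redis_client, while the log line keys off config.redis. Constructing Config() with defaults is an assumption:

from redis.client import Redis

from antarest.core.cache.main import build_cache
from antarest.core.config import Config

config = Config()  # assumed default-constructible
local_cache = build_cache(config)                           # -> LocalCache
redis_cache = build_cache(config, Redis(host="localhost"))  # -> RedisCache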
66 changes: 17 additions & 49 deletions antarest/core/config.py
@@ -5,6 +5,7 @@
 from typing import Any, Dict, List, Optional

 import yaml
+
 from antarest.core.model import JSON
 from antarest.core.roles import RoleType

@@ -26,9 +27,7 @@ class ExternalAuthConfig:
     def from_dict(data: JSON) -> "ExternalAuthConfig":
         return ExternalAuthConfig(
             url=data.get("url", None),
-            default_group_role=RoleType(
-                data.get("default_group_role", RoleType.READER.value)
-            ),
+            default_group_role=RoleType(data.get("default_group_role", RoleType.READER.value)),
             add_ext_groups=data.get("add_ext_groups", False),
             group_mapping=data.get("group_mapping", {}),
         )
@@ -51,9 +50,7 @@ def from_dict(data: JSON) -> "SecurityConfig":
             jwt_key=data.get("jwt", {}).get("key", ""),
             admin_pwd=data.get("login", {}).get("admin", {}).get("pwd", ""),
             disabled=data.get("disabled", False),
-            external_auth=ExternalAuthConfig.from_dict(
-                data.get("external_auth", {})
-            ),
+            external_auth=ExternalAuthConfig.from_dict(data.get("external_auth", {})),
         )


@@ -135,26 +132,17 @@ def from_dict(data: JSON) -> "StorageConfig":
         return StorageConfig(
             tmp_dir=Path(data.get("tmp_dir", tempfile.gettempdir())),
             matrixstore=Path(data["matrixstore"]),
-            workspaces={
-                n: WorkspaceConfig.from_dict(w)
-                for n, w in data["workspaces"].items()
-            },
+            workspaces={n: WorkspaceConfig.from_dict(w) for n, w in data["workspaces"].items()},
             allow_deletion=data.get("allow_deletion", False),
             archive_dir=Path(data["archive_dir"]),
             watcher_lock=data.get("watcher_lock", True),
             watcher_lock_delay=data.get("watcher_lock_delay", 10),
-            download_default_expiration_timeout_minutes=data.get(
-                "download_default_expiration_timeout_minutes", 1440
-            ),
+            download_default_expiration_timeout_minutes=data.get("download_default_expiration_timeout_minutes", 1440),
             matrix_gc_sleeping_time=data.get("matrix_gc_sleeping_time", 3600),
             matrix_gc_dry_run=data.get("matrix_gc_dry_run", False),
-            auto_archive_threshold_days=data.get(
-                "auto_archive_threshold_days", 60
-            ),
+            auto_archive_threshold_days=data.get("auto_archive_threshold_days", 60),
             auto_archive_dry_run=data.get("auto_archive_dry_run", False),
-            auto_archive_sleeping_time=data.get(
-                "auto_archive_sleeping_time", 3600
-            ),
+            auto_archive_sleeping_time=data.get("auto_archive_sleeping_time", 3600),
             auto_archive_max_parallel=data.get("auto_archive_max_parallel", 5),
         )

@@ -202,9 +190,7 @@ def from_dict(data: JSON) -> "SlurmConfig":
             default_n_cpu=data["default_n_cpu"],
             default_json_db_name=data["default_json_db_name"],
             slurm_script_path=data["slurm_script_path"],
-            antares_versions_on_remote_server=data[
-                "antares_versions_on_remote_server"
-            ],
+            antares_versions_on_remote_server=data["antares_versions_on_remote_server"],
             max_cores=data.get("max_cores", 64),
         )

@@ -301,9 +287,7 @@ class CacheConfig:
     @staticmethod
     def from_dict(data: JSON) -> "CacheConfig":
         return CacheConfig(
-            checker_delay=float(data["checker_delay"])
-            if "checker_delay" in data
-            else 0.2,
+            checker_delay=float(data["checker_delay"]) if "checker_delay" in data else 0.2,
         )


@@ -314,9 +298,7 @@ class RemoteWorkerConfig:

     @staticmethod
     def from_dict(data: JSON) -> "RemoteWorkerConfig":
-        return RemoteWorkerConfig(
-            name=data["name"], queues=data.get("queues", [])
-        )
+        return RemoteWorkerConfig(name=data["name"], queues=data.get("queues", []))


 @dataclass(frozen=True)
@@ -331,9 +313,7 @@ class TaskConfig:
     @staticmethod
     def from_dict(data: JSON) -> "TaskConfig":
         return TaskConfig(
-            max_workers=int(data["max_workers"])
-            if "max_workers" in data
-            else 5,
+            max_workers=int(data["max_workers"]) if "max_workers" in data else 5,
             remote_workers=list(
                 map(
                     lambda x: RemoteWorkerConfig.from_dict(x),
@@ -355,9 +335,7 @@ class ServerConfig:
     @staticmethod
     def from_dict(data: JSON) -> "ServerConfig":
         return ServerConfig(
-            worker_threadpool_size=int(data["worker_threadpool_size"])
-            if "worker_threadpool_size" in data
-            else 5,
+            worker_threadpool_size=int(data["worker_threadpool_size"]) if "worker_threadpool_size" in data else 5,
             services=data.get("services", []),
         )

@@ -403,21 +381,11 @@ def from_dict(data: JSON, res: Optional[Path] = None) -> "Config":
             debug=data.get("debug", False),
             resources_path=res or Path(),
             root_path=data.get("root_path", ""),
-            redis=RedisConfig.from_dict(data["redis"])
-            if "redis" in data
-            else None,
-            eventbus=EventBusConfig.from_dict(data["eventbus"])
-            if "eventbus" in data
-            else EventBusConfig(),
-            cache=CacheConfig.from_dict(data["cache"])
-            if "cache" in data
-            else CacheConfig(),
-            tasks=TaskConfig.from_dict(data["tasks"])
-            if "tasks" in data
-            else TaskConfig(),
-            server=ServerConfig.from_dict(data["server"])
-            if "server" in data
-            else ServerConfig(),
+            redis=RedisConfig.from_dict(data["redis"]) if "redis" in data else None,
+            eventbus=EventBusConfig.from_dict(data["eventbus"]) if "eventbus" in data else EventBusConfig(),
+            cache=CacheConfig.from_dict(data["cache"]) if "cache" in data else CacheConfig(),
+            tasks=TaskConfig.from_dict(data["tasks"]) if "tasks" in data else TaskConfig(),
+            server=ServerConfig.from_dict(data["server"]) if "server" in data else ServerConfig(),
         )

     @staticmethod
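The pattern repeated throughout this file, condensed into a quick check: each from_dict reads a key when present and falls back to a literal default otherwise. The expected values below follow directly from the defaults visible in the diff:

from antarest.core.config import CacheConfig, ServerConfig

assert CacheConfig.from_dict({}).checker_delay == 0.2
assert CacheConfig.from_dict({"checker_delay": "1.5"}).checker_delay == 1.5

server = ServerConfig.from_dict({})
assert server.worker_threadpool_size == 5
assert server.services == []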
10 changes: 3 additions & 7 deletions antarest/core/configdata/model.py
@@ -1,8 +1,8 @@
 from enum import Enum
-from typing import Optional, Any
+from typing import Any, Optional

 from pydantic import BaseModel
-from sqlalchemy import Column, String, Integer, Boolean, DateTime, ForeignKey, Sequence  # type: ignore
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, Sequence, String  # type: ignore
 from sqlalchemy.orm import relationship  # type: ignore

 from antarest.core.persistence import Base
@@ -22,11 +22,7 @@ class ConfigData(Base):  # type: ignore
     def __eq__(self, other: Any) -> bool:
         if not isinstance(other, ConfigData):
             return False
-        return bool(
-            other.key == self.key
-            and other.value == self.value
-            and other.owner == self.owner
-        )
+        return bool(other.key == self.key and other.value == self.value and other.owner == self.owner)

     def __repr__(self) -> str:
         return f"key={self.key}, value={self.value}, owner={self.owner}"
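A small illustration of the value-based __eq__ collapsed above: two ConfigData rows with the same key, value and owner compare equal regardless of object identity. Constructing the ORM object directly with these kwargs, and the owner being an integer, are assumptions:

from antarest.core.configdata.model import ConfigData

a = ConfigData(key="maintenance_mode", value="0", owner=1)
b = ConfigData(key="maintenance_mode", value="0", owner=1)
assert a == b  # compares key, value and owner, not identity
assert repr(a) == "key=maintenance_mode, value=0, owner=1"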