v2.18.0 (#2254)
skamril authored Nov 29, 2024
2 parents 6ff5520 + f933c62 commit e3e752b
Showing 918 changed files with 74,347 additions and 61,860 deletions.
3 changes: 1 addition & 2 deletions .github/workflows/deploy.yml
@@ -42,12 +42,11 @@ jobs:
       - name: 🐍 Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: 3.11
 
       - name: 🐍 Install development dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install pydantic --no-binary pydantic
           pip install -r requirements-dev.txt
       - name: 🐍 Install Windows dependencies
3 changes: 2 additions & 1 deletion .github/workflows/license_header.yml
@@ -13,7 +13,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: 3.11
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
@@ -22,4 +22,5 @@ jobs:
         run: |
           python license_checker_and_adder.py --path=../antarest/ --action=check-strict
           python license_checker_and_adder.py --path=../tests/ --action=check-strict
+          python license_checker_and_adder.py --path=../webapp/src --action=check-strict
         working-directory: scripts
6 changes: 3 additions & 3 deletions .github/workflows/main.yml
@@ -13,7 +13,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: 3.11
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
@@ -46,14 +46,14 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: 3.11
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
           pip install -r requirements-dev.txt
       - name: Test with pytest
         run: |
-          pytest --cov antarest --cov-report xml -n auto
+          pytest --cov antarest --cov-report xml -n auto --dist=worksteal
       - name: Archive code coverage results
         if: matrix.os == 'ubuntu-20.04'
         uses: actions/upload-artifact@v4
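A note on the pytest change above: `--dist=worksteal` is a pytest-xdist scheduling mode (added in pytest-xdist 3.2) in which idle workers steal queued tests from busy ones, which evens out wall-clock time when test durations vary. A minimal sketch of the equivalent invocation through pytest's Python API, assuming pytest, pytest-cov and pytest-xdist are installed:

```python
# Hedged sketch: the same test run driven from Python rather than the shell.
import pytest

exit_code = pytest.main([
    "--cov", "antarest",      # measure coverage of the antarest package
    "--cov-report", "xml",    # emit coverage.xml for the archive step
    "-n", "auto",             # one worker per available CPU core
    "--dist", "worksteal",    # idle workers pull tests from busy workers' queues
])
```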
2 changes: 1 addition & 1 deletion .github/workflows/worker.yml
@@ -18,7 +18,7 @@ jobs:
       - name: 🐍 Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: 3.11
 
       - name: 🐍 Install dependencies
         run: |
27 changes: 17 additions & 10 deletions AntaresWebLinux.spec
@@ -1,25 +1,27 @@
 # -*- mode: python ; coding: utf-8 -*-
 from pathlib import Path
-from PyInstaller.utils.hooks import collect_dynamic_libs
 
 block_cipher = None
 
-# We need to analyze all alembic files to be sure the migration phase works fine
-migrations_dir = Path('alembic/versions')
-migration_files = [str(f) for f in migrations_dir.iterdir() if f.is_file() and f.suffix == '.py']
+# We need to analyze all alembic files to be sure the migration phase works fine:
+# alembic loads version files by their path, so we need to add them as "data" to the package,
+# but all the dependencies they use need to be included also, so we need to perform a
+# dedicated analysis for this.
+versions_dir = Path('alembic/versions')
+versions_files = [str(f) for f in versions_dir.iterdir() if f.is_file() and f.suffix == '.py']
+alembic_analysis = Analysis(["alembic/env.py"] + versions_files)
 
-binaries = [('./alembic.ini', './alembic.ini')] + collect_dynamic_libs('tables')
 
-antares_web_server_a = Analysis(['antarest/gui.py', 'alembic/env.py'] + migration_files,
+antares_web_server_a = Analysis(['antarest/gui.py'],
                                 pathex=[],
-                                binaries=binaries,
-                                datas=[('./resources', './resources'), ('./alembic', './alembic')],
+                                binaries=[],
+                                datas=[('./resources', './resources'), ('./alembic', './alembic'), ('./alembic.ini', './')],
                                 hiddenimports=[
                                     'cmath',
                                     'antarest.dbmodel',
                                     'plyer.platforms.linux',
                                     'plyer.platforms.linux.notification',
+                                    'pythonjsonlogger.jsonlogger',
-                                    'tables',
                                 ],
                                 hookspath=['extra-hooks'],
                                 hooksconfig={},
@@ -29,8 +31,13 @@ antares_web_server_a = Analysis(['antarest/gui.py', 'alembic/env.py'] + migratio
                                 win_private_assemblies=False,
                                 cipher=block_cipher,
                                 noarchive=False)
-antares_web_server_pyz = PYZ(antares_web_server_a.pure, antares_web_server_a.zipped_data,
+
+all_python = antares_web_server_a.pure + alembic_analysis.pure
+all_zipped_data = antares_web_server_a.zipped_data + alembic_analysis.zipped_data
+
+antares_web_server_pyz = PYZ(all_python, all_zipped_data,
                              cipher=block_cipher)
+
 antares_web_server_exe = EXE(antares_web_server_pyz,
                              antares_web_server_a.scripts,
                              [],
27 changes: 17 additions & 10 deletions AntaresWebWin.spec
@@ -1,25 +1,27 @@
 # -*- mode: python ; coding: utf-8 -*-
 from pathlib import Path
-from PyInstaller.utils.hooks import collect_dynamic_libs
 
 block_cipher = None
 
-# We need to analyze all alembic files to be sure the migration phase works fine
-migrations_dir = Path('alembic/versions')
-migration_files = [str(f) for f in migrations_dir.iterdir() if f.is_file() and f.suffix == '.py']
+# We need to analyze all alembic files to be sure the migration phase works fine:
+# alembic loads version files by their path, so we need to add them as "data" to the package,
+# but all the dependencies they use need to be included also, so we need to perform a
+# dedicated analysis for this.
+versions_dir = Path('alembic/versions')
+versions_files = [str(f) for f in versions_dir.iterdir() if f.is_file() and f.suffix == '.py']
+alembic_analysis = Analysis(["alembic/env.py"] + versions_files)
 
-binaries = [('./alembic.ini', './alembic.ini')] + collect_dynamic_libs('tables')
 
-antares_web_server_a = Analysis(['antarest/gui.py', 'alembic/env.py'] + migration_files,
+antares_web_server_a = Analysis(['antarest/gui.py'],
                                 pathex=[],
-                                binaries=binaries,
-                                datas=[('./resources', './resources'), ('./alembic', './alembic')],
+                                binaries=[],
+                                datas=[('./resources', './resources'), ('./alembic', './alembic'), ('./alembic.ini', './')],
                                 hiddenimports=[
                                     'cmath',
                                     'antarest.dbmodel',
                                     'plyer.platforms.win',
                                     'plyer.platforms.win.notification',
+                                    'pythonjsonlogger.jsonlogger',
-                                    'tables',
                                 ],
                                 hookspath=['extra-hooks'],
                                 hooksconfig={},
@@ -29,8 +31,13 @@ antares_web_server_a = Analysis(['antarest/gui.py', 'alembic/env.py'] + migratio
                                 win_private_assemblies=False,
                                 cipher=block_cipher,
                                 noarchive=False)
-antares_web_server_pyz = PYZ(antares_web_server_a.pure, antares_web_server_a.zipped_data,
+
+all_python = antares_web_server_a.pure + alembic_analysis.pure
+all_zipped_data = antares_web_server_a.zipped_data + alembic_analysis.zipped_data
+
+antares_web_server_pyz = PYZ(all_python, all_zipped_data,
                              cipher=block_cipher)
+
 antares_web_server_exe = EXE(antares_web_server_pyz,
                              antares_web_server_a.scripts,
                              [],
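Both spec files apply the same pattern, so one condensed sketch covers them: alembic imports its version files by path at runtime, which is why they ship as data, while a dedicated Analysis pass pulls in whatever those files themselves import. Names below come from the specs above; note that `Analysis` and `PYZ` are globals PyInstaller injects while executing a `.spec` file, so this is spec-file code, not a standalone script:

```python
from pathlib import Path

# Collect the migration scripts; alembic will load them by file path at runtime.
version_files = [str(f) for f in Path('alembic/versions').iterdir()
                 if f.is_file() and f.suffix == '.py']

# Dedicated analysis so the migrations' own imports also land in the bundle.
alembic_analysis = Analysis(['alembic/env.py'] + version_files)

app_analysis = Analysis(['antarest/gui.py'],
                        datas=[('./alembic', './alembic'), ('./alembic.ini', './')])

# Merge the pure-Python modules from both analyses into a single archive.
pyz = PYZ(app_analysis.pure + alembic_analysis.pure,
          app_analysis.zipped_data + alembic_analysis.zipped_data)
```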
8 changes: 2 additions & 6 deletions Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.8-slim-bullseye
+FROM python:3.11-slim-bullseye
 
 # RUN apt update && apt install -y procps gdb
 
@@ -16,10 +16,6 @@ COPY ./scripts /scripts
 COPY ./alembic /alembic
 COPY ./alembic.ini /alembic.ini
 
-RUN ./scripts/install-debug.sh
-
-RUN pip3 install --upgrade pip \
-    && pip3 install -r /conf/requirements.txt
-
+RUN pip3 install --no-cache-dir --upgrade pip && pip3 install --no-cache-dir -r /conf/requirements.txt
 
 ENTRYPOINT ["./scripts/start.sh"]
1 change: 0 additions & 1 deletion README.md
@@ -23,7 +23,6 @@ Install back-end dependencies

 ```shell script
 python -m pip install --upgrade pip
-pip install pydantic --no-binary pydantic
 pip install -r requirements.txt # use requirements-dev.txt if building a single binary with pyinstaller
 ```

199 changes: 199 additions & 0 deletions alembic/versions/490b80a84bb5_add_cascade_delete_for_sqlite.py
@@ -0,0 +1,199 @@
"""add_cascade_delete_for_sqlite
Revision ID: 490b80a84bb5
Revises: c0c4aaf84861
Create Date: 2024-10-11 11:38:45.108227
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = '490b80a84bb5'
down_revision = 'c0c4aaf84861'
branch_labels = None
depends_on = None


def upgrade():
    _migrate(upgrade=True)

def downgrade():
    _migrate(upgrade=False)


def _migrate(upgrade: bool):
    # Use ondelete='CASCADE' to avoid foreign key issues in SQLite.
    # As it doesn't support dropping foreign keys, we have to do the migration ourselves.
    # https://www.sqlite.org/lang_altertable.html#otheralter
    # 1 - Create a table with the right columns
    # 2 - Copy all the data from the old table into the new one
    # 3 - Remove the old table
    # 4 - Rename the new table to have the old name

    dialect_name: str = op.get_context().dialect.name
    if dialect_name == "postgresql":
        return

    # =============================
    # STUDY_ADDITIONAL_DATA
    # =============================

    op.create_table('study_additional_data_copy',
                    sa.Column('study_id', sa.String(length=36), nullable=False),
                    sa.Column('author', sa.String(length=255), nullable=True),
                    sa.Column('horizon', sa.String(), nullable=True),
                    sa.Column('patch', sa.String(), nullable=True),
                    sa.ForeignKeyConstraint(['study_id'], ['study.id'], ondelete='CASCADE' if upgrade else None),
                    sa.PrimaryKeyConstraint('study_id')
                    )
    bind = op.get_bind()
    content = bind.execute("SELECT * FROM study_additional_data")
    for row in content:
        bind.execute(
            "INSERT INTO study_additional_data_copy (study_id, author, horizon, patch) VALUES (?,?,?,?)",
            (row[0], row[1], row[2], row[3])
        )
    op.drop_table("study_additional_data")
    op.rename_table("study_additional_data_copy", "study_additional_data")

    # =============================
    # RAW_METADATA
    # =============================

    op.create_table('rawstudycopy',
                    sa.Column('id', sa.String(length=36), nullable=False),
                    sa.Column('content_status', sa.Enum('VALID', 'WARNING', 'ERROR', name='studycontentstatus'),
                              nullable=True),
                    sa.Column('workspace', sa.String(length=255), nullable=False),
                    sa.Column('missing', sa.String(length=255), nullable=True),
                    sa.ForeignKeyConstraint(['id'], ['study.id'], ondelete='CASCADE' if upgrade else None),
                    sa.PrimaryKeyConstraint('id')
                    )
    with op.batch_alter_table("rawstudycopy", schema=None) as batch_op:
        if upgrade:
            batch_op.create_index(batch_op.f("ix_rawstudycopy_missing"), ["missing"], unique=False)
            batch_op.create_index(batch_op.f("ix_rawstudycopy_workspace"), ["workspace"], unique=False)
        else:
            batch_op.drop_index(batch_op.f("ix_rawstudycopy_missing"))
            batch_op.drop_index(batch_op.f("ix_rawstudycopy_workspace"))

    bind = op.get_bind()
    content = bind.execute("SELECT * FROM rawstudy")
    for row in content:
        bind.execute(
            "INSERT INTO rawstudycopy (id, content_status, workspace, missing) VALUES (?,?,?,?)",
            (row[0], row[1], row[2], row[3])
        )
    op.drop_table("rawstudy")
    op.rename_table("rawstudycopy", "rawstudy")

    # =============================
    # COMMAND BLOCK
    # =============================

    op.create_table(
        "commandblock_copy",
        sa.Column("id", sa.String(length=36), nullable=False),
        sa.Column("study_id", sa.String(length=36), nullable=True),
        sa.Column("block_index", sa.Integer(), nullable=True),
        sa.Column("command", sa.String(length=255), nullable=True),
        sa.Column("version", sa.Integer(), nullable=True),
        sa.Column("args", sa.String(), nullable=True),
        sa.ForeignKeyConstraint(
            ["study_id"],
            ["variantstudy.id"],
            ondelete="CASCADE" if upgrade else None
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("id"),
    )
    bind = op.get_bind()
    content = bind.execute("SELECT * FROM commandblock")
    for row in content:
        bind.execute(
            "INSERT INTO commandblock_copy (id, study_id, block_index, command, version, args) VALUES (?,?,?,?,?,?)",
            (row[0], row[1], row[2], row[3], row[4], row[5])
        )
    op.alter_column(table_name="commandblock_copy", column_name="block_index", new_column_name="index")
    op.drop_table("commandblock")
    op.rename_table("commandblock_copy", "commandblock")

    # =============================
    # VARIANT STUDY SNAPSHOT
    # =============================

    op.create_table(
        "variant_study_snapshot_copy",
        sa.Column("id", sa.String(length=36), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column('last_executed_command', sa.String(), nullable=True),
        sa.ForeignKeyConstraint(
            ["id"],
            ["variantstudy.id"],
            ondelete="CASCADE" if upgrade else None
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    bind = op.get_bind()
    content = bind.execute("SELECT * FROM variant_study_snapshot")
    for row in content:
        bind.execute(
            "INSERT INTO variant_study_snapshot_copy (id, created_at, last_executed_command) VALUES (?,?,?)",
            (row[0], row[1], row[2])
        )
    op.drop_table("variant_study_snapshot")
    op.rename_table("variant_study_snapshot_copy", "variant_study_snapshot")

    # =============================
    # VARIANT STUDY
    # =============================

    op.create_table(
        "variantstudy_copy",
        sa.Column("id", sa.String(length=36), nullable=False),
        sa.Column('generation_task', sa.String(), nullable=True),
        sa.ForeignKeyConstraint(
            ["id"],
            ["study.id"],
            ondelete="CASCADE" if upgrade else None
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    bind = op.get_bind()
    content = bind.execute("SELECT * FROM variantstudy")
    for row in content:
        bind.execute(
            "INSERT INTO variantstudy_copy (id, generation_task) VALUES (?,?)",
            (row[0], row[1])
        )
    op.drop_table("variantstudy")
    op.rename_table("variantstudy_copy", "variantstudy")

    # =============================
    # GROUP METADATA
    # =============================

    op.create_table('groupmetadatacopy',
                    sa.Column('group_id', sa.String(length=36), nullable=False),
                    sa.Column('study_id', sa.String(length=36), nullable=False),
                    sa.ForeignKeyConstraint(['group_id'], ['groups.id'], ondelete="CASCADE" if upgrade else None),
                    sa.ForeignKeyConstraint(['study_id'], ['study.id'], ondelete="CASCADE" if upgrade else None)
                    )
    with op.batch_alter_table("groupmetadatacopy", schema=None) as batch_op:
        if upgrade:
            batch_op.create_index(batch_op.f("ix_groupmetadatacopy_group_id"), ["group_id"], unique=False)
            batch_op.create_index(batch_op.f("ix_groupmetadatacopy_study_id"), ["study_id"], unique=False)
        else:
            batch_op.drop_index(batch_op.f("ix_groupmetadatacopy_group_id"))
            batch_op.drop_index(batch_op.f("ix_groupmetadatacopy_study_id"))
    bind = op.get_bind()
    content = bind.execute("SELECT * FROM group_metadata")
    for row in content:
        bind.execute(
            "INSERT INTO groupmetadatacopy (group_id, study_id) VALUES (?,?)",
            (row[0], row[1])
        )
    op.drop_table("group_metadata")
    op.rename_table("groupmetadatacopy", "group_metadata")
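For context on the rebuild pattern used throughout this migration: SQLite cannot add or drop a foreign key on an existing table, so each table is recreated with `ondelete='CASCADE'` and its rows are copied across by hand. Alembic's batch mode automates the same create-copy-drop-rename dance; a hedged sketch of what one table's upgrade could look like with it (the constraint name is assumed for illustration, not taken from the source):

```python
# Hedged sketch, not the migration above: on SQLite, batch_alter_table
# recreates the table behind the scenes and copies the rows automatically.
with op.batch_alter_table("study_additional_data") as batch_op:
    batch_op.create_foreign_key(
        "fk_study_additional_data_study_id",  # assumed constraint name
        "study",
        ["study_id"], ["id"],
        ondelete="CASCADE",
    )
```

The hand-written version keeps explicit control over the row copy and avoids depending on constraint names, which unnamed SQLite foreign keys make awkward in batch mode.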
