diff --git a/README.md b/README.md index 9603378..9a9c0c3 100644 --- a/README.md +++ b/README.md @@ -27,14 +27,34 @@ A REST API to execute [teuthology commands](https://docs.ceph.com/projects/teuth TEUTHOLOGY_API_SERVER_HOST: 0.0.0.0 TEUTHOLOGY_API_SERVER_PORT: 8080 PADDLES_URL: http://localhost:8080 + TEUTHOLOGY_API_SQLALCHEMY_URL: postgresql+psycopg2://admin:password@tapi_db:5432/tapi_db depends_on: - teuthology - paddles + - tapi_db links: - teuthology - paddles + - tapi_db healthcheck: test: [ "CMD", "curl", "-f", "http://0.0.0.0:8082" ] + tapi_db: + image: postgres:14 + healthcheck: + test: [ "CMD", "pg_isready", "-q", "-d", "tapi_db", "-U", "admin" ] + timeout: 5s + interval: 10s + retries: 2 + environment: + - POSTGRES_USER=root + - POSTGRES_PASSWORD=password + - APP_DB_USER=admin + - APP_DB_PASS=password + - APP_DB_NAME=tapi_db + volumes: + - ./db:/docker-entrypoint-initdb.d/ + ports: + - 5433:5432 ``` [optional] For developement use: diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..608ca2b --- /dev/null +++ b/alembic.ini @@ -0,0 +1,115 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 
+ +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README b/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..badebf8 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,79 @@ +from logging.config import fileConfig +import os + +from sqlalchemy import engine_from_config +from sqlalchemy import pool +from alembic import context + +from src import models + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +config.set_main_option("sqlalchemy.url", models.DATABASE_URL) + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +target_metadata = models.Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/0684e3c62926_add_presets_table.py b/alembic/versions/0684e3c62926_add_presets_table.py new file mode 100644 index 0000000..8b40c9e --- /dev/null +++ b/alembic/versions/0684e3c62926_add_presets_table.py @@ -0,0 +1,40 @@ +"""Add presets table + +Revision ID: 0684e3c62926 +Revises: +Create Date: 2023-09-07 12:56:56.526870 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '0684e3c62926' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('presets', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('username', sa.String(), nullable=True), + sa.Column('name', sa.String(), nullable=True), + sa.Column('suite', sa.String(), nullable=True), + sa.Column('cmd', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('username', 'name') + ) + op.create_index(op.f('ix_presets_username'), 'presets', ['username'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f('ix_presets_username'), table_name='presets') + op.drop_table('presets') + # ### end Alembic commands ### diff --git a/alembic/versions/0c784180661e_add_auto_suite_table.py b/alembic/versions/0c784180661e_add_auto_suite_table.py new file mode 100644 index 0000000..887a56d --- /dev/null +++ b/alembic/versions/0c784180661e_add_auto_suite_table.py @@ -0,0 +1,46 @@ +"""add auto_suite table + +Revision ID: 0c784180661e +Revises: 0684e3c62926 +Create Date: 2023-09-26 19:16:30.231745 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '0c784180661e' +down_revision: Union[str, None] = '0684e3c62926' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('auto_suite', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('username', sa.String(), nullable=True), + sa.Column('status', sa.String(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), nullable=True), + sa.Column('scheduling_started_at', sa.DateTime(timezone=True), nullable=True), + sa.Column('branch', sa.String(), nullable=True), + sa.Column('distro', sa.String(), nullable=True), + sa.Column('distro_version', sa.String(), nullable=True), + sa.Column('flavor', sa.String(), nullable=True), + sa.Column('suite', sa.String(), nullable=True), + sa.Column('log_path', sa.String(), nullable=True), + sa.Column('cmd', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_auto_suite_id'), 'auto_suite', ['id'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f('ix_auto_suite_id'), table_name='auto_suite') + op.drop_table('auto_suite') + # ### end Alembic commands ### diff --git a/gh-actions/start.sh b/gh-actions/start.sh index b7552e2..e58a88f 100755 --- a/gh-actions/start.sh +++ b/gh-actions/start.sh @@ -11,14 +11,34 @@ if [ ! -d "$folder" ] ; then environment: TEUTHOLOGY_API_SERVER_HOST: 0.0.0.0 TEUTHOLOGY_API_SERVER_PORT: 8080 + TEUTHOLOGY_API_SQLALCHEMY_URL: postgresql+psycopg2://admin:password@tapi_db:5432/tapi_db depends_on: - teuthology - paddles + - tapi_db links: - teuthology - paddles + - tapi_db healthcheck: test: [ "CMD", "curl", "-f", "http://0.0.0.0:8082" ] + tapi_db: + image: postgres:14 + healthcheck: + test: [ "CMD", "pg_isready", "-q", "-d", "tapi_db", "-U", "admin" ] + timeout: 5s + interval: 10s + retries: 2 + environment: + - POSTGRES_USER=root + - POSTGRES_PASSWORD=password + - APP_DB_USER=admin + - APP_DB_PASS=password + - APP_DB_NAME=tapi_db + volumes: + - ./db:/docker-entrypoint-initdb.d/ + ports: + - 5433:5432 " >> teuthology/docs/docker-compose/docker-compose.yml fi cd teuthology/docs/docker-compose diff --git a/requirements.txt b/requirements.txt index 4c32b7b..3cf8732 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,3 +8,6 @@ pydantic-settings # Temporarily, using teuthology without monkey patching the thread git+https://github.com/ceph/teuthology@teuth-api#egg=teuthology[test] # Original: git+https://github.com/ceph/teuthology#egg=teuthology[test] +fastapi-sqlalchemy +alembic +psycopg2-binary \ No newline at end of file diff --git a/src/main.py b/src/main.py index b04fba8..96c68b9 100644 --- a/src/main.py +++ b/src/main.py @@ -3,7 +3,7 @@ from fastapi import FastAPI, Request from fastapi.middleware.cors import CORSMiddleware from starlette.middleware.sessions import SessionMiddleware -from routes import suite, kill, login, logout +from routes import suite, kill, login, logout, presets, auto_suite from dotenv import load_dotenv load_dotenv() @@ -39,3 +39,5 @@ def read_root(request: Request): app.include_router(kill.router) app.include_router(login.router) app.include_router(logout.router) +app.include_router(presets.router) +app.include_router(auto_suite.router) diff --git a/src/models/__init__.py b/src/models/__init__.py new file mode 100644 index 0000000..005c15b --- /dev/null +++ b/src/models/__init__.py @@ -0,0 +1,22 @@ +from sqlalchemy import create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker, Session +import os + +DATABASE_URL = os.getenv("TEUTHOLOGY_API_SQLALCHEMY_URL") +engine = 
create_engine(DATABASE_URL) + +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) +Base = declarative_base() + + +def get_db() -> Session: + db = SessionLocal() + try: + yield db + finally: + db.close() + + +from .presets import Presets +from .auto_suite import AutoSuite diff --git a/src/models/auto_suite.py b/src/models/auto_suite.py new file mode 100644 index 0000000..3fb0865 --- /dev/null +++ b/src/models/auto_suite.py @@ -0,0 +1,19 @@ +from sqlalchemy import Column, DateTime, Integer, String +from sqlalchemy.orm import Session +from . import Base + + +class AutoSuite(Base): + __tablename__ = "auto_suite" + id = Column(Integer, primary_key=True, index=True) + username = Column(String) + status = Column(String) + created_at = Column(DateTime(timezone=True)) + scheduling_started_at = Column(DateTime(timezone=True)) + branch = Column(String) + distro = Column(String) + distro_version = Column(String) + flavor = Column(String) + suite = Column(String) + log_path = Column(String) + cmd = Column(String) diff --git a/src/models/presets.py b/src/models/presets.py new file mode 100644 index 0000000..fcccfa6 --- /dev/null +++ b/src/models/presets.py @@ -0,0 +1,13 @@ +from sqlalchemy import Column, Integer, String, UniqueConstraint +from . import Base + + +class Presets(Base): + __tablename__ = "presets" + id = Column(Integer, primary_key=True) + username = Column(String, index=True) + name = Column(String) + suite = Column(String) + cmd = Column(String) + + __table_args__ = (UniqueConstraint("username", "name"),) diff --git a/src/routes/auto_suite.py b/src/routes/auto_suite.py new file mode 100644 index 0000000..efade8a --- /dev/null +++ b/src/routes/auto_suite.py @@ -0,0 +1,39 @@ +import logging +from fastapi import APIRouter, HTTPException, Depends +from sqlalchemy.orm import Session + +from models import get_db +from services.auto_suite import AutoSuiteService, auto_schedule +from schemas.auto_suite import AutoSuiteSchema, BuildStatusWebhook + +log = logging.getLogger(__name__) + +router = APIRouter( + prefix="/auto-schedule", + tags=["auto-schedule"], +) + + +@router.post("/", status_code=200) +def create_auto_suite(payload: AutoSuiteSchema, db: Session = Depends(get_db)): + return AutoSuiteService(db).create(payload.model_dump()) + + +@router.get("/", status_code=200) +def get_auto_suite(username: str, db: Session = Depends(get_db)): + db_record = AutoSuiteService(db).get_by_username(username) + if not db_record: + raise HTTPException( + status_code=404, detail=f"User has no auto_suite scheduled." 
+ ) + return db_record + + +@router.post("/webhook/build-status", status_code=201) +def build_status_webhook( + ready_builds: BuildStatusWebhook, db: Session = Depends(get_db) +): + db_records = AutoSuiteService(db).get_by_build_data(ready_builds) + if db_records: + auto_schedule(db_records) + return db_records diff --git a/src/routes/presets.py b/src/routes/presets.py new file mode 100644 index 0000000..ba0fc34 --- /dev/null +++ b/src/routes/presets.py @@ -0,0 +1,98 @@ +from fastapi import APIRouter, HTTPException, Depends, Response +from sqlalchemy.orm import Session +import logging + +from services.helpers import get_token +from models import get_db +from services.presets import PresetsDatabaseException, PresetsService +from schemas.presets import PresetsSchema + +log = logging.getLogger(__name__) + +router = APIRouter( + prefix="/presets", + tags=["presets"], +) + + +@router.get("/", status_code=200) +def read_preset(username: str, name: str, db: Session = Depends(get_db)): + db_preset = PresetsService(db).get_by_username_and_name(username, name) + if not db_preset: + raise HTTPException(status_code=404, detail=f"Preset does not exist.") + return db_preset + + +@router.get("/list", status_code=200) +def read_preset(username: str, db: Session = Depends(get_db)): + db_presets = PresetsService(db).get_by_username(username) + if not db_presets: + raise HTTPException(status_code=404, detail=f"User has no presets saved.") + return db_presets + + +@router.post("/add", status_code=200) +def add_preset( + preset: PresetsSchema, + db: Session = Depends(get_db), + access_token: str = Depends(get_token), +): + if not access_token: + raise HTTPException( + status_code=401, + detail="You need to be logged in", + headers={"WWW-Authenticate": "Bearer"}, + ) + db_preset = PresetsService(db).get_by_username_and_name( + username=preset.username, preset_name=preset.name + ) + if db_preset: + raise HTTPException( + status_code=400, detail=f"Preset of this username & name already exists." 
+ ) + return PresetsService(db).create(preset.model_dump()) + + +@router.put("/edit/{preset_id}", status_code=200) +def update_preset( + preset_id: int, + updated_data: PresetsSchema, + db: Session = Depends(get_db), + access_token: str = Depends(get_token), +): + if not access_token: + raise HTTPException( + status_code=401, + detail="You need to be logged in", + headers={"WWW-Authenticate": "Bearer"}, + ) + try: + return PresetsService(db).update( + preset_id, updated_data.model_dump(exclude_unset=True) + ) + except PresetsDatabaseException as exc: + raise HTTPException( + status_code=exc.code, + detail=str(exc), + ) + + +@router.delete("/delete/{preset_id}", status_code=204) +def delete_preset( + preset_id: int, + db: Session = Depends(get_db), + access_token: str = Depends(get_token), +): + if not access_token: + raise HTTPException( + status_code=401, + detail="You need to be logged in", + headers={"WWW-Authenticate": "Bearer"}, + ) + try: + PresetsService(db).delete(preset_id) + except PresetsDatabaseException as exc: + raise HTTPException( + status_code=exc.code, + detail=str(exc), + ) diff --git a/src/routes/suite.py b/src/routes/suite.py index e803879..2ac6fa3 100644 --- a/src/routes/suite.py +++ b/src/routes/suite.py @@ -21,4 +21,10 @@ def create_run( logs: bool = False, ): args = args.model_dump(by_alias=True) - return run(args, dry_run, logs, access_token) + if not access_token: + raise HTTPException( + status_code=401, + detail="You need to be logged in", + headers={"WWW-Authenticate": "Bearer"}, + ) + return run(args, dry_run, logs) diff --git a/src/schemas/auto_suite.py b/src/schemas/auto_suite.py new file mode 100644 index 0000000..55e7a17 --- /dev/null +++ b/src/schemas/auto_suite.py @@ -0,0 +1,33 @@ +from typing import Union +from pydantic import BaseModel, Field +from .suite import SuiteArgs + + +class BuildStatusWebhook(BaseModel): + # pylint: disable=too-few-public-methods + """ + Class for Build Status Webhook. + """ + status: str = Field(default="") + distro: Union[str, None] = Field(default="") + distro_version: Union[str, None] = Field(default="") + ref: Union[str, None] = Field(default="") + sha1: Union[str, None] = Field(default="") + flavor: Union[str, None] = Field(default="") + url: Union[str, None] = Field(default="") + + +class AutoSuiteSchema(BaseModel): + # pylint: disable=too-few-public-methods + """ + Class for Auto Suite Args. + """ + username: Union[str, None] = Field(default=None) + status: Union[str, None] = Field(default=None) + branch: Union[str, None] = Field(default=None) + distro: Union[str, None] = Field(default=None) + distro_version: Union[str, None] = Field(default=None) + flavor: Union[str, None] = Field(default=None) + suite: Union[str, None] = Field(default=None) + log_path: Union[str, None] = Field(default=None) + cmd: Union[SuiteArgs, None] = Field(default=None) \ No newline at end of file diff --git a/src/schemas/presets.py b/src/schemas/presets.py new file mode 100644 index 0000000..87f8075 --- /dev/null +++ b/src/schemas/presets.py @@ -0,0 +1,13 @@ +from pydantic import BaseModel, Field +from typing import Union + + +class PresetsSchema(BaseModel): + # pylint: disable=too-few-public-methods + """ + Class for Presets Schema. 
+ """ + username: Union[str, None] = Field(default=None) + name: Union[str, None] = Field(default=None) + suite: Union[str, None] = Field(default=None) + cmd: Union[str, None] = Field(default=None) diff --git a/src/services/auto_suite.py b/src/services/auto_suite.py new file mode 100644 index 0000000..45fbbc4 --- /dev/null +++ b/src/services/auto_suite.py @@ -0,0 +1,64 @@ +from sqlalchemy.orm import Session +from models.auto_suite import AutoSuite +from schemas.auto_suite import BuildStatusWebhook +from services.suite import run + + +def auto_schedule(autosuite_records): + for auto_suite in autosuite_records: + run(auto_suite.cmd, dry_run=False, send_logs=True) + + +class AutoSuiteDatabaseException(Exception): + def __init__(self, message, code): + super().__init__(message) + self.code = code + + +class AutoSuiteService: + def __init__(self, db: Session) -> None: + self.db = db + + def get_by_username(self, username: str): + db_autosuite = ( + self.db.query(AutoSuite).filter(AutoSuite.username == username).all() + ) + return db_autosuite + + def get_by_build_data(self, build_data: BuildStatusWebhook): + distro = build_data.distro + distro_version = build_data.distro_version + flavor = build_data.flavor + branch = build_data.ref + db_preset = ( + self.db.query(AutoSuite) + .filter( + AutoSuite.distro == distro, + AutoSuite.distro_version == distro_version, + AutoSuite.flavor == flavor, + AutoSuite.branch == branch, + ) + .first() + ) + return db_preset + + def create(self, new_obj: dict): + new_autosuite = AutoSuite(**new_obj) + self.db.add(new_autosuite) + self.db.commit() + self.db.refresh(new_autosuite) + return new_autosuite + + def update(self, id: int, update_data: dict): + autosuite_query = self.db.query(AutoSuite).filter(AutoSuite.id == id) + db_autosuite = autosuite_query.first() + if not db_autosuite: + raise AutoSuiteDatabaseException( + "AutoSuite object does not exist - unable to update.", 404 + ) + autosuite_query.filter(AutoSuite.id == id).update( + update_data, synchronize_session=False + ) + self.db.commit() + self.db.refresh(db_autosuite) + return db_autosuite diff --git a/src/services/presets.py b/src/services/presets.py new file mode 100644 index 0000000..5af0db8 --- /dev/null +++ b/src/services/presets.py @@ -0,0 +1,60 @@ +from sqlalchemy.orm import Session +from models.presets import Presets + + +class PresetsDatabaseException(Exception): + def __init__(self, message, code): + super().__init__(message) + self.code = code + + +class PresetsService: + def __init__(self, db: Session) -> None: + self.db = db + + def get_by_username(self, username: str): + db_preset = self.db.query(Presets).filter(Presets.username == username).all() + return db_preset + + def get_by_username_and_name(self, username: str, preset_name: str): + db_preset = ( + self.db.query(Presets) + .filter(Presets.username == username, Presets.name == preset_name) + .first() + ) + return db_preset + + def get_by_id(self, preset_id: int): + db_preset = self.db.query(Presets).filter(Presets.id == preset_id).first() + return db_preset + + def create(self, preset: dict) -> Presets: + new_preset = Presets(**preset) + self.db.add(new_preset) + self.db.commit() + self.db.refresh(new_preset) + return new_preset + + def update(self, preset_id: int, update_data): + preset_query = self.db.query(Presets).filter(Presets.id == preset_id) + db_preset = preset_query.first() + if not db_preset: + raise PresetsDatabaseException( + "Presets does not exist - unable to update.", 404 + ) + preset_query.filter(Presets.id == 
preset_id).update( + update_data, synchronize_session=False + ) + self.db.commit() + self.db.refresh(db_preset) + return db_preset + + def delete(self, preset_id: int): + preset_query = self.db.query(Presets).filter(Presets.id == preset_id) + db_preset = preset_query.first() + if not db_preset: + raise PresetsDatabaseException( + "Presets does not exist - unable to delete.", 404 + ) + preset_query.delete(synchronize_session=False) + self.db.commit() diff --git a/src/services/suite.py b/src/services/suite.py index bfe09c1..bbd442d 100644 --- a/src/services/suite.py +++ b/src/services/suite.py @@ -7,17 +7,11 @@ log = logging.getLogger(__name__) -def run(args, dry_run: bool, send_logs: bool, access_token: str): +def run(args, dry_run: bool, send_logs: bool): """ Schedule a suite. :returns: Run details (dict) and logs (list). """ - if not access_token: - raise HTTPException( - status_code=401, - detail="You need to be logged in", - headers={"WWW-Authenticate": "Bearer"}, - ) try: args["--timestamp"] = datetime.now().strftime("%Y-%m-%d_%H:%M:%S") if dry_run: diff --git a/start_container.sh b/start_container.sh index 1dbb424..51c4a80 100644 --- a/start_container.sh +++ b/start_container.sh @@ -5,6 +5,7 @@ trap exit TERM HOST=${TEUTHOLOGY_API_SERVER_HOST:-"0.0.0.0"} PORT=${TEUTHOLOGY_API_SERVER_PORT:-"8080"} +alembic -x verbose=1 upgrade head cd /teuthology_api/src/
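
The patch above wires the new `/presets` and `/auto-schedule` routers into the API and runs the Alembic migrations at container start. For orientation only, the sketch below shows how the unauthenticated endpoints could be exercised once the compose stack is up; it is not part of the patch. The base URL, username, preset name, and build metadata are placeholders (the compose healthcheck probes `http://0.0.0.0:8082`, but the actual host/port depends on your deployment), and the webhook call will trigger real scheduling if a matching `auto_suite` row exists.

```python
# Illustrative client sketch -- not part of this patch.
# Assumptions: the API is reachable at BASE_URL; the username, preset name,
# and build metadata below are made-up values for demonstration only.
import requests

BASE_URL = "http://localhost:8082"  # placeholder; match your deployment

# List every preset saved by a user (GET /presets/list); 404 means none saved.
resp = requests.get(f"{BASE_URL}/presets/list", params={"username": "alice"})
print(resp.status_code, resp.json() if resp.ok else resp.text)

# Fetch a single preset by username and preset name (GET /presets/).
resp = requests.get(
    f"{BASE_URL}/presets/", params={"username": "alice", "name": "nightly-rados"}
)
print(resp.status_code, resp.json() if resp.ok else resp.text)

# Simulate the build-status webhook (POST /auto-schedule/webhook/build-status).
# If an auto_suite row matches the distro/distro_version/flavor/ref fields,
# the service will attempt to schedule it, so only point this at a test instance.
payload = {
    "status": "ready",
    "distro": "ubuntu",
    "distro_version": "22.04",
    "ref": "main",
    "sha1": "0000000000000000000000000000000000000000",
    "flavor": "default",
    "url": "https://example.invalid/build/123",
}
resp = requests.post(f"{BASE_URL}/auto-schedule/webhook/build-status", json=payload)
print(resp.status_code, resp.json())
```

The write routes (`/presets/add`, `/presets/edit/{preset_id}`, `/presets/delete/{preset_id}`) additionally depend on `get_token`, so they return 401 unless the client has first authenticated through the existing login route.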