Commit b2910c9

Merge pull request #245 from dwreeves/update-model-2024
(draft) update model for 2024 boating season
dwreeves authored May 26, 2024
2 parents d5a77fe + 9310c63 commit b2910c9
Showing 67 changed files with 1,539 additions and 1,333 deletions.
12 changes: 6 additions & 6 deletions .github/workflows/tests.yml
@@ -29,18 +29,18 @@ jobs:

steps:
- uses: actions/checkout@v2
- name: Python 3.10 Setup
- name: Python Setup
uses: actions/setup-python@v2
with:
python-version: "3.10"
python-version: "3.12"
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install libpq-dev
python -m pip install --upgrade pip
python -m pip install -r requirements.txt
- name: Run linter
run: flake8 app tests alembic
pip install --upgrade uv
uv pip install --system -r requirements.txt
- name: Run pre-commit
run: pre-commit run -a
- name: Run Celery worker
run: flask celery worker &
env:
19 changes: 12 additions & 7 deletions .pre-commit-config.yaml
@@ -4,16 +4,21 @@ repos:
rev: v2.3.0
hooks:
- id: check-yaml
args: [--allow-multiple-documents]
- id: end-of-file-fixer
- id: trailing-whitespace

- repo: https://github.com/PyCQA/flake8
rev: 7.0.0
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.4.5
hooks:
- id: flake8
# Run the linter.
- id: ruff
args: [--fix]
# Run the formatter.
- id: ruff-format

- repo: https://github.com/pycqa/isort
rev: 5.10.1
- repo: https://github.com/hadolint/hadolint
rev: v2.12.0
hooks:
- id: isort
entry: isort --sl --lines-after-imports=2
- id: hadolint-docker
2 changes: 0 additions & 2 deletions Dockerfile
@@ -1,7 +1,5 @@
FROM python:3.12

MAINTAINER Daniel Reeves "[email protected]"

ADD --chmod=755 https://astral.sh/uv/install.sh /install.sh
RUN /install.sh && rm /install.sh

3 changes: 2 additions & 1 deletion alembic/env.py
@@ -61,6 +61,7 @@ def run_migrations_online():
In this scenario we need to create an Engine and associate a connection
with the context.
"""

# If you use Alembic revision's --autogenerate flag this function will
# prevent Alembic from creating an empty migration file if nothing changed.
# Source: https://alembic.sqlalchemy.org/en/latest/cookbook.html
@@ -80,7 +81,7 @@ def process_revision_directives(context, revision, directives):
context.configure(
connection=connection,
target_metadata=target_metadata,
process_revision_directives=process_revision_directives
process_revision_directives=process_revision_directives,
)

with context.begin_transaction():
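The comment in this hunk points at the Alembic cookbook recipe for suppressing empty autogenerate revisions, but the function body itself is truncated above. A minimal sketch of what that callback typically looks like, assuming the standard pattern from the Alembic docs (the module-level `config = context.config` object is part of the stock env.py template; this is an illustration, not necessarily this repo's exact code):

def process_revision_directives(context, revision, directives):
    # When `alembic revision --autogenerate` detected no schema changes,
    # clear the directives so no empty migration file is written.
    if config.cmd_opts.autogenerate:
        script = directives[0]
        if script.upgrade_ops.is_empty():
            directives[:] = []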
62 changes: 33 additions & 29 deletions alembic/versions/rev001.py
@@ -8,6 +8,7 @@
Create Date: 2022-01-22 15:49:40.837695
"""

import sqlalchemy as sa
from sqlalchemy import schema
from sqlalchemy.engine.reflection import Inspector
@@ -17,7 +18,7 @@


# revision identifiers, used by Alembic.
revision = '016fff145273'
revision = "016fff145273"
down_revision = None
branch_labels = None
depends_on = None
@@ -35,44 +36,47 @@ def upgrade():
# - model_outputs
# These are rewritten each time; their data doesn't need to be persisted.

if 'boathouses' not in tables:
op.execute(schema.CreateSequence(schema.Sequence('boathouses_id_seq')))
if "boathouses" not in tables:
op.execute(schema.CreateSequence(schema.Sequence("boathouses_id_seq")))
op.create_table(
'boathouses',
"boathouses",
sa.Column(
'id', sa.Integer(), autoincrement=True, nullable=False,
server_default=sa.text("nextval('boathouses_id_seq'::regclass)")
"id",
sa.Integer(),
autoincrement=True,
nullable=False,
server_default=sa.text("nextval('boathouses_id_seq'::regclass)"),
),
sa.Column('boathouse', sa.String(length=255), nullable=False),
sa.Column('reach', sa.Integer(), nullable=True),
sa.Column('latitude', sa.Numeric(), nullable=True),
sa.Column('longitude', sa.Numeric(), nullable=True),
sa.Column('overridden', sa.Boolean(), nullable=True),
sa.Column('reason', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('boathouse')
sa.Column("boathouse", sa.String(length=255), nullable=False),
sa.Column("reach", sa.Integer(), nullable=True),
sa.Column("latitude", sa.Numeric(), nullable=True),
sa.Column("longitude", sa.Numeric(), nullable=True),
sa.Column("overridden", sa.Boolean(), nullable=True),
sa.Column("reason", sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint("boathouse"),
)
with open(QUERIES_DIR + '/override_event_triggers_v1.sql', 'r') as f:
with open(QUERIES_DIR + "/override_event_triggers_v1.sql", "r") as f:
sql = sa.text(f.read())
conn.execute(sql)
if 'live_website_options' not in tables:
if "live_website_options" not in tables:
op.create_table(
'live_website_options',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('flagging_message', sa.Text(), nullable=True),
sa.Column('boating_season', sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint('id')
"live_website_options",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("flagging_message", sa.Text(), nullable=True),
sa.Column("boating_season", sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
if 'override_history' not in tables:
if "override_history" not in tables:
op.create_table(
'override_history',
sa.Column('time', sa.TIMESTAMP(), nullable=True),
sa.Column('boathouse', sa.TEXT(), nullable=True),
sa.Column('overridden', sa.BOOLEAN(), nullable=True),
sa.Column('reason', sa.TEXT(), nullable=True)
"override_history",
sa.Column("time", sa.TIMESTAMP(), nullable=True),
sa.Column("boathouse", sa.TEXT(), nullable=True),
sa.Column("overridden", sa.BOOLEAN(), nullable=True),
sa.Column("reason", sa.TEXT(), nullable=True),
)


def downgrade():
op.drop_table('live_website_options')
op.drop_table('boathouses')
op.drop_table('override_history')
op.drop_table("live_website_options")
op.drop_table("boathouses")
op.drop_table("override_history")
80 changes: 42 additions & 38 deletions alembic/versions/rev002.py
@@ -6,6 +6,7 @@
Create Date: 2022-01-22 17:03:23.094306
"""

import sqlalchemy as sa
from sqlalchemy.engine.reflection import Inspector

@@ -14,8 +15,8 @@


# revision identifiers, used by Alembic.
revision = '39a4e575f68c'
down_revision = '016fff145273'
revision = "39a4e575f68c"
down_revision = "016fff145273"
branch_labels = None
depends_on = None

@@ -27,65 +28,68 @@ def upgrade():

# Create reach association table
op.create_table(
'reach',
sa.Column('id', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('id')
"reach",
sa.Column("id", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("id"),
)

# We need to make sure that we can add the foreign key constraint to the
# boathouse relation. If the boathouse table is already populated, this
# necessitates running this now before adding the constraint.
num_boathouses = conn.execute(sa.text('select * from boathouses;')).scalar()
num_boathouses = conn.execute(sa.text("select * from boathouses;")).scalar()
if num_boathouses is not None and num_boathouses > 0:
with open(QUERIES_DIR + '/define_reach.sql', 'r') as f:
with open(QUERIES_DIR + "/define_reach.sql", "r") as f:
sql = sa.text(f.read())
conn.execute(sql)

# Migrate predictions
# Technically speaking, the types won't match the SQLA schema because Pandas
# overwrites the tables.
if 'model_outputs' in tables:
op.alter_column('model_outputs', 'reach', new_column_name='reach_id')
op.drop_column('model_outputs', 'log_odds')
op.rename_table('model_outputs', 'prediction')
if "model_outputs" in tables:
op.alter_column("model_outputs", "reach", new_column_name="reach_id")
op.drop_column("model_outputs", "log_odds")
op.rename_table("model_outputs", "prediction")
else:
op.create_table(
'prediction',
sa.Column('reach_id', sa.Integer(), nullable=False),
sa.Column('time', sa.DateTime(), nullable=False),
sa.Column('probability', sa.Numeric(), nullable=True),
sa.Column('safe', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['reach_id'], ['reach.id'], ),
sa.PrimaryKeyConstraint('reach_id', 'time')
"prediction",
sa.Column("reach_id", sa.Integer(), nullable=False),
sa.Column("time", sa.DateTime(), nullable=False),
sa.Column("probability", sa.Numeric(), nullable=True),
sa.Column("safe", sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(
["reach_id"],
["reach.id"],
),
sa.PrimaryKeyConstraint("reach_id", "time"),
)

# Migrate override history.
op.alter_column('override_history', 'boathouse', new_column_name='boathouse_name')
op.alter_column("override_history", "boathouse", new_column_name="boathouse_name")

# Migrate website options.
op.rename_table('live_website_options', 'website_options')
op.rename_table("live_website_options", "website_options")

# Migrate boathouse table
op.alter_column('boathouses', 'boathouse', new_column_name='name')
op.alter_column('boathouses', 'reach', new_column_name='reach_id')
op.alter_column('boathouses', 'overridden', server_default='f')
op.create_unique_constraint(None, 'boathouses', ['name'])
op.create_foreign_key(None, 'boathouses', 'reach', ['reach_id'], ['id'])
op.rename_table('boathouses', 'boathouse')
with open(QUERIES_DIR + '/override_event_triggers_v2.sql', 'r') as f:
op.alter_column("boathouses", "boathouse", new_column_name="name")
op.alter_column("boathouses", "reach", new_column_name="reach_id")
op.alter_column("boathouses", "overridden", server_default="f")
op.create_unique_constraint(None, "boathouses", ["name"])
op.create_foreign_key(None, "boathouses", "reach", ["reach_id"], ["id"])
op.rename_table("boathouses", "boathouse")
with open(QUERIES_DIR + "/override_event_triggers_v2.sql", "r") as f:
sql = sa.text(f.read())
conn.execute(sql)


def downgrade():
op.rename_table('boathouse', 'boathouses')
op.drop_constraint(None, 'boathouses', type_='foreignkey')
op.drop_constraint(None, 'boathouses', type_='unique')
op.alter_column('boathouses', 'name', new_column_name='boathouse')
op.alter_column('boathouses', 'reach_id', new_column_name='reach')
op.alter_column('boathouses', 'overridden', server_default=None)

op.drop_table('model_outputs')
op.drop_table('prediction')
op.drop_table('reach')
op.rename_table("boathouse", "boathouses")
op.drop_constraint(None, "boathouses", type_="foreignkey")
op.drop_constraint(None, "boathouses", type_="unique")
op.alter_column("boathouses", "name", new_column_name="boathouse")
op.alter_column("boathouses", "reach_id", new_column_name="reach")
op.alter_column("boathouses", "overridden", server_default=None)

op.drop_table("model_outputs")
op.drop_table("prediction")
op.drop_table("reach")
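The "Pandas overwrites the tables" note in this migration suggests predictions are written with `DataFrame.to_sql(..., if_exists="replace")`, which drops and recreates the table using dtypes inferred from the frame rather than the Alembic schema. A minimal illustration of that behavior (hypothetical frame and connection string, an assumption rather than code from this commit):

import pandas as pd
import sqlalchemy as sa

engine = sa.create_engine("postgresql:///flagging")  # assumed local DSN
df = pd.DataFrame(
    {"reach_id": [1], "time": [pd.Timestamp.now()], "probability": [0.12], "safe": [True]}
)
# `replace` drops and recreates `prediction`, so the resulting column
# types come from pandas' inference, not the SQLAlchemy model.
df.to_sql("prediction", engine, if_exists="replace", index=False)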
9 changes: 5 additions & 4 deletions alembic/versions/rev003.py
@@ -6,19 +6,20 @@
Create Date: 2023-04-17 17:42:44.755320
"""

from alembic import op


# revision identifiers, used by Alembic.
revision = '793fab3b5438'
down_revision = '39a4e575f68c'
revision = "793fab3b5438"
down_revision = "39a4e575f68c"
branch_labels = None
depends_on = None


def upgrade():
op.alter_column('prediction', 'probability', new_column_name='predicted_ecoli_cfu_100ml')
op.alter_column("prediction", "probability", new_column_name="predicted_ecoli_cfu_100ml")


def downgrade():
op.alter_column('prediction', 'predicted_ecoli_cfu_100ml', new_column_name='probability')
op.alter_column("prediction", "predicted_ecoli_cfu_100ml", new_column_name="probability")
9 changes: 5 additions & 4 deletions alembic/versions/rev004.py
@@ -6,19 +6,20 @@
Create Date: 2023-05-21 12:57:25.545426
"""

from alembic import op


# revision identifiers, used by Alembic.
revision = 'e433f34dd4bd'
down_revision = '793fab3b5438'
revision = "e433f34dd4bd"
down_revision = "793fab3b5438"
branch_labels = None
depends_on = None


def upgrade():
op.alter_column('prediction', 'predicted_ecoli_cfu_100ml', new_column_name='probability')
op.alter_column("prediction", "predicted_ecoli_cfu_100ml", new_column_name="probability")


def downgrade():
op.alter_column('prediction', 'probability', new_column_name='predicted_ecoli_cfu_100ml')
op.alter_column("prediction", "probability", new_column_name="predicted_ecoli_cfu_100ml")
2 changes: 1 addition & 1 deletion app/README.md
@@ -1,4 +1,4 @@
# `/app` Content
# `/app` Content

- `/blueprints`: custom endpoints for the flagging website (all the logic that helps render the HTML pages and JSON responses).
- `/data`: database, predictive models, and functions to retrieve live data.
19 changes: 6 additions & 13 deletions app/admin/base.py
@@ -15,9 +15,9 @@ class BaseView(_BaseView):

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.endpoint != 'admin':
if self.endpoint != "admin":
self.url = self.url or self.endpoint
self.endpoint = f'admin_{self.endpoint}'
self.endpoint = f"admin_{self.endpoint}"

def is_accessible(self) -> bool:
return basic_auth.authenticate()
@@ -29,23 +29,16 @@ def inaccessible_callback(self, name, **kwargs):

class ModelView(sqla.ModelView, BaseView):
"""Base Admin view for SQLAlchemy models."""

can_export = True
export_types = ['csv']
export_types = ["csv"]
create_modal = True
edit_modal = True

def __init__(
self,
model,
session,
*args,
ignore_columns: List[str] = None,
**kwargs
):
def __init__(self, model, session, *args, ignore_columns: List[str] = None, **kwargs):
# Show all columns in form except any in `ignore_columns`
self.column_list = [
c.key for c in model.__table__.columns
if c.key not in (ignore_columns or [])
c.key for c in model.__table__.columns if c.key not in (ignore_columns or [])
]
self.form_columns = self.column_list
super().__init__(model, session, *args, **kwargs)
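For reference, the `ignore_columns` hook above is consumed at registration time. A hypothetical registration, assuming a Flask-Admin setup and model/session names that may not match this repo exactly:

from flask_admin import Admin

admin = Admin(app)  # assumes an existing Flask `app`
# Expose every Boathouse column in list and form views except the
# surrogate `id`, per the constructor logic shown above.
admin.add_view(ModelView(Boathouse, db.session, ignore_columns=["id"]))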