fix: new migration for audit_log and fix failed backend tests
hamed-valiollahi committed Nov 15, 2024
1 parent c65b351 commit 82b8977
Showing 3 changed files with 112 additions and 111 deletions.
121 changes: 11 additions & 110 deletions backend/lcfs/db/migrations/versions/2024-11-01-12-27_bf26425d2a14.py
@@ -1,17 +1,5 @@
 """Database-Level Audit Logging with JSON Delta
 
-Note:
-As the table grows, automatic archiving (e.g., moving older logs to an archive table) and purging (e.g., deleting very old logs)
-can be implemented in the future to maintain performance and manage storage efficiently.
-
-Archiving:
-- Create an `audit_log_archive` table with the same structure as `audit_log`.
-- Use a scheduled job (e.g., with `pg_cron`) to move records older than a certain threshold (e.g., 1 month) from `audit_log` to `audit_log_archive`.
-- Alternatively, consider creating date-based archive tables (e.g., audit_log_archive_2025_01) to organize logs by time periods.
-
-Purging:
-- Use a scheduled job (e.g., with `pg_cron`) to delete records older than a defined retention period (e.g., 1 year) from `audit_log_archive`.
-
 Revision ID: bf26425d2a14
 Revises: 1b4d0dcf70a8
 Create Date: 2024-11-01 12:27:33.901648
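
The note removed above sketches pg_cron-based archiving and purging. For reference, a rough sketch of what those scheduled jobs could look like, assuming the pg_cron extension is installed and an audit_log_archive table mirroring audit_log already exists (neither ships with this commit; job names and intervals are illustrative):

    -- Nightly at 02:00: move month-old rows into the archive table.
    SELECT cron.schedule(
        'archive-audit-log',
        '0 2 * * *',
        $$
        WITH moved AS (
            DELETE FROM audit_log
            WHERE create_date < now() - interval '1 month'
            RETURNING *
        )
        INSERT INTO audit_log_archive SELECT * FROM moved
        $$
    );

    -- Sundays at 03:00: purge archived rows past a one-year retention period.
    SELECT cron.schedule(
        'purge-audit-log-archive',
        '0 3 * * 0',
        $$ DELETE FROM audit_log_archive WHERE create_date < now() - interval '1 year' $$
    );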
@@ -33,114 +21,31 @@ def upgrade() -> None:
     # Step 1: Create the audit_log table
     op.create_table(
         "audit_log",
-        sa.Column(
-            "audit_log_id",
-            sa.Integer(),
-            autoincrement=True,
-            nullable=False,
-            comment="Unique identifier for each audit log entry.",
-        ),
-        sa.Column(
-            "table_name",
-            sa.Text(),
-            nullable=False,
-            comment="Name of the table where the action occurred.",
-        ),
-        sa.Column(
-            "operation",
-            sa.Text(),
-            nullable=False,
-            comment="Type of operation: 'INSERT', 'UPDATE', or 'DELETE'.",
-        ),
-        sa.Column(
-            "row_id",
-            postgresql.JSONB(astext_type=sa.Text()),
-            nullable=False,
-            comment="Primary key of the affected row, stored as JSONB to support composite keys.",
-        ),
-        sa.Column(
-            "old_values",
-            postgresql.JSONB(astext_type=sa.Text()),
-            nullable=True,
-            comment="Previous values before the operation.",
-        ),
-        sa.Column(
-            "new_values",
-            postgresql.JSONB(astext_type=sa.Text()),
-            nullable=True,
-            comment="New values after the operation.",
-        ),
-        sa.Column(
-            "delta",
-            postgresql.JSONB(astext_type=sa.Text()),
-            nullable=True,
-            comment="JSONB delta of the changes.",
-        ),
+        sa.Column("id", sa.BigInteger(), autoincrement=True, nullable=False),
+        sa.Column("table_name", sa.Text(), nullable=False),
+        sa.Column("operation", sa.Text(), nullable=False),
+        sa.Column("row_id", postgresql.JSONB(astext_type=sa.Text()), nullable=False),
+        sa.Column("old_values", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
+        sa.Column("new_values", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
+        sa.Column("delta", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
         sa.Column(
             "create_date",
             sa.TIMESTAMP(timezone=True),
             server_default=sa.text("now()"),
             nullable=True,
-            comment="Timestamp when the audit log entry was created.",
         ),
-        sa.Column(
-            "create_user",
-            sa.String(),
-            nullable=True,
-            comment="User who created the audit log entry.",
-        ),
+        sa.Column("create_user", sa.String(), nullable=True),
         sa.Column(
             "update_date",
             sa.TIMESTAMP(timezone=True),
             server_default=sa.text("now()"),
             nullable=True,
-            comment="Timestamp when the audit log entry was last updated.",
         ),
-        sa.Column(
-            "update_user",
-            sa.String(),
-            nullable=True,
-            comment="User who last updated the audit log entry.",
-        ),
-        sa.PrimaryKeyConstraint("audit_log_id", name=op.f("pk_audit_log")),
-        sa.UniqueConstraint(
-            "audit_log_id",
-            name=op.f("uq_audit_log_audit_log_id"),
-        ),
-        comment="Audit log capturing changes to database tables.",
+        sa.Column("update_user", sa.String(), nullable=True),
+        sa.PrimaryKeyConstraint("id", name=op.f("pk_audit_log")),
     )
 
     # Create indexes
     op.create_index(
-        "idx_audit_log_table_name",
-        "audit_log",
-        ["table_name"],
-        unique=False,
-    )
-    op.create_index(
-        "idx_audit_log_operation",
-        "audit_log",
-        ["operation"],
-        unique=False,
-    )
-    op.create_index(
-        "idx_audit_log_create_date",
-        "audit_log",
-        ["create_date"],
-        unique=False,
-    )
-    op.create_index(
-        "idx_audit_log_create_user",
-        "audit_log",
-        ["create_user"],
-        unique=False,
-    )
-    op.create_index(
-        "idx_audit_log_delta",
-        "audit_log",
-        ["delta"],
-        postgresql_using="gin",
-        unique=False,
+        "idx_audit_log_table_name", "audit_log", ["table_name"], unique=False
     )
 
     # Step 2: Create JSONB_DIFF FUNCTION

@@ -323,9 +228,5 @@ def downgrade() -> None:
     op.execute("DROP FUNCTION IF EXISTS jsonb_diff;")
 
     # Step 1 Downgrade: Drop audit_log table
-    op.drop_index("idx_audit_log_delta", table_name="audit_log")
-    op.drop_index("idx_audit_log_create_user", table_name="audit_log")
-    op.drop_index("idx_audit_log_create_date", table_name="audit_log")
-    op.drop_index("idx_audit_log_operation", table_name="audit_log")
     op.drop_index("idx_audit_log_table_name", table_name="audit_log")
     op.drop_table("audit_log")
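
The JSONB_DIFF function created in Step 2 is collapsed in this view, together with the audit trigger that calls it. As a point of reference only, a minimal sketch of the general shape such a function can take; this is an assumption about its shape, not the committed implementation:

    -- Keeps the keys of new_row whose values differ from old_row.
    CREATE OR REPLACE FUNCTION jsonb_diff(old_row JSONB, new_row JSONB)
    RETURNS JSONB AS $$
        SELECT COALESCE(jsonb_object_agg(key, new_row -> key), '{}'::jsonb)
        FROM jsonb_object_keys(new_row) AS key
        WHERE new_row -> key IS DISTINCT FROM old_row -> key;
    $$ LANGUAGE sql IMMUTABLE;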
101 changes: 101 additions & 0 deletions backend/lcfs/db/migrations/versions/2024-11-15-21-05_1974af823b80.py
@@ -0,0 +1,101 @@
"""Enhance audit_log: Rename id, add comments, enforce uniqueness, and create indexes.
Revision ID: 1974af823b80
Revises: b659816d0a86
Create Date: 2024-11-15 21:05:06.629584
"""

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = "1974af823b80"
down_revision = "b659816d0a86"
branch_labels = None
depends_on = None


def upgrade():
# Step 1: Rename 'id' column to 'audit_log_id'
op.alter_column("audit_log", "id", new_column_name="audit_log_id")

# Step 2: Add comments to the table and columns
op.execute(
"COMMENT ON TABLE audit_log IS 'Audit log capturing changes to database tables.';"
)
op.execute(
"COMMENT ON COLUMN audit_log.audit_log_id IS 'Unique identifier for each audit log entry.';"
)
op.execute(
"COMMENT ON COLUMN audit_log.table_name IS 'Name of the table where the action occurred.';"
)
op.execute(
"COMMENT ON COLUMN audit_log.operation IS 'Type of operation: ''INSERT'', ''UPDATE'', or ''DELETE''.';"
)
op.execute(
"COMMENT ON COLUMN audit_log.row_id IS 'Primary key of the affected row, stored as JSONB to support composite keys.';"
)
op.execute(
"COMMENT ON COLUMN audit_log.old_values IS 'Previous values before the operation.';"
)
op.execute(
"COMMENT ON COLUMN audit_log.new_values IS 'New values after the operation.';"
)
op.execute("COMMENT ON COLUMN audit_log.delta IS 'JSONB delta of the changes.';")
op.execute(
"COMMENT ON COLUMN audit_log.create_date IS 'Timestamp when the audit log entry was created.';"
)
op.execute(
"COMMENT ON COLUMN audit_log.create_user IS 'User who created the audit log entry.';"
)
op.execute(
"COMMENT ON COLUMN audit_log.update_date IS 'Timestamp when the audit log entry was last updated.';"
)
op.execute(
"COMMENT ON COLUMN audit_log.update_user IS 'User who last updated the audit log entry.';"
)

# Step 3: Add unique constraint on 'audit_log_id'
op.create_unique_constraint(
"uq_audit_log_audit_log_id", "audit_log", ["audit_log_id"]
)

# Step 4: Create new indexes
op.create_index("idx_audit_log_operation", "audit_log", ["operation"])
op.create_index("idx_audit_log_create_date", "audit_log", ["create_date"])
op.create_index("idx_audit_log_create_user", "audit_log", ["create_user"])
op.create_index(
"idx_audit_log_delta", "audit_log", ["delta"], postgresql_using="gin"
)


def downgrade():
# Reverse the above operations

# Step 4: Drop new indexes
op.drop_index("idx_audit_log_delta", table_name="audit_log")
op.drop_index("idx_audit_log_create_user", table_name="audit_log")
op.drop_index("idx_audit_log_create_date", table_name="audit_log")
op.drop_index("idx_audit_log_operation", table_name="audit_log")

# Step 3: Drop unique constraint on 'audit_log_id'
op.drop_constraint("uq_audit_log_audit_log_id", "audit_log", type_="unique")

# Step 2: Remove comments
op.execute("COMMENT ON COLUMN audit_log.update_user IS NULL;")
op.execute("COMMENT ON COLUMN audit_log.update_date IS NULL;")
op.execute("COMMENT ON COLUMN audit_log.create_user IS NULL;")
op.execute("COMMENT ON COLUMN audit_log.create_date IS NULL;")
op.execute("COMMENT ON COLUMN audit_log.delta IS NULL;")
op.execute("COMMENT ON COLUMN audit_log.new_values IS NULL;")
op.execute("COMMENT ON COLUMN audit_log.old_values IS NULL;")
op.execute("COMMENT ON COLUMN audit_log.row_id IS NULL;")
op.execute("COMMENT ON COLUMN audit_log.operation IS NULL;")
op.execute("COMMENT ON COLUMN audit_log.table_name IS NULL;")
op.execute("COMMENT ON COLUMN audit_log.audit_log_id IS NULL;")
op.execute("COMMENT ON TABLE audit_log IS NULL;")

# Step 1: Rename 'audit_log_id' column back to 'id'
op.alter_column("audit_log", "audit_log_id", new_column_name="id")
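
Once both migrations have run, the GIN index on delta is what makes JSONB containment queries cheap. A couple of illustrative queries; the table name 'transfer' and the key current_status_id are placeholders, not names taken from this commit:

    -- Recent UPDATEs to one audited table (uses the b-tree indexes above):
    SELECT audit_log_id, row_id, delta
    FROM audit_log
    WHERE table_name = 'transfer'
      AND operation = 'UPDATE'
      AND create_date > now() - interval '1 day';

    -- The GIN index on delta serves JSONB containment (@>):
    SELECT audit_log_id, row_id
    FROM audit_log
    WHERE delta @> '{"current_status_id": 6}';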
1 change: 0 additions & 1 deletion backend/lcfs/tests/test_organization.py
@@ -4,7 +4,6 @@
 from starlette import status
 
 from lcfs.db.models.user.Role import RoleEnum
-from lcfs.tests.audit_log.conftest import mock_user_role
 from lcfs.web.api.organizations.schema import (
     OrganizationBalanceResponseSchema,
     OrganizationListSchema,

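As a closing sanity check, the comments and indexes added by revision 1974af823b80 can be read back from the PostgreSQL catalogs once the migration has been applied:

    -- Table and first-column comments added in Step 2:
    SELECT obj_description('audit_log'::regclass, 'pg_class') AS table_comment;
    SELECT col_description('audit_log'::regclass, 1) AS audit_log_id_comment;

    -- Indexes created in Step 4, plus idx_audit_log_table_name from bf26425d2a14:
    SELECT indexname FROM pg_indexes WHERE tablename = 'audit_log';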