Commit bea27fc

Merge pull request #1219 from bcgov/feat/hamed-audit-log-feature-1037

Feat: Add Audit Log Feature for IDIR Administrators - 1037

hamed-valiollahi authored Nov 15, 2024
2 parents 9b24c59 + 32123ba, commit bea27fc
Showing 27 changed files with 1,410 additions and 292 deletions.
101 changes: 101 additions & 0 deletions backend/lcfs/db/migrations/versions/2024-11-15-21-05_1974af823b80.py
@@ -0,0 +1,101 @@
"""Enhance audit_log: Rename id, add comments, enforce uniqueness, and create indexes.
Revision ID: 1974af823b80
Revises: b659816d0a86
Create Date: 2024-11-15 21:05:06.629584
"""

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = "1974af823b80"
down_revision = "b659816d0a86"
branch_labels = None
depends_on = None


def upgrade():
    # Step 1: Rename 'id' column to 'audit_log_id'
    op.alter_column("audit_log", "id", new_column_name="audit_log_id")

    # Step 2: Add comments to the table and columns
    op.execute(
        "COMMENT ON TABLE audit_log IS 'Audit log capturing changes to database tables.';"
    )
    op.execute(
        "COMMENT ON COLUMN audit_log.audit_log_id IS 'Unique identifier for each audit log entry.';"
    )
    op.execute(
        "COMMENT ON COLUMN audit_log.table_name IS 'Name of the table where the action occurred.';"
    )
    op.execute(
        "COMMENT ON COLUMN audit_log.operation IS 'Type of operation: ''INSERT'', ''UPDATE'', or ''DELETE''.';"
    )
    op.execute(
        "COMMENT ON COLUMN audit_log.row_id IS 'Primary key of the affected row, stored as JSONB to support composite keys.';"
    )
    op.execute(
        "COMMENT ON COLUMN audit_log.old_values IS 'Previous values before the operation.';"
    )
    op.execute(
        "COMMENT ON COLUMN audit_log.new_values IS 'New values after the operation.';"
    )
    op.execute("COMMENT ON COLUMN audit_log.delta IS 'JSONB delta of the changes.';")
    op.execute(
        "COMMENT ON COLUMN audit_log.create_date IS 'Timestamp when the audit log entry was created.';"
    )
    op.execute(
        "COMMENT ON COLUMN audit_log.create_user IS 'User who created the audit log entry.';"
    )
    op.execute(
        "COMMENT ON COLUMN audit_log.update_date IS 'Timestamp when the audit log entry was last updated.';"
    )
    op.execute(
        "COMMENT ON COLUMN audit_log.update_user IS 'User who last updated the audit log entry.';"
    )

    # Step 3: Add unique constraint on 'audit_log_id'
    op.create_unique_constraint(
        "uq_audit_log_audit_log_id", "audit_log", ["audit_log_id"]
    )

    # Step 4: Create new indexes
    op.create_index("idx_audit_log_operation", "audit_log", ["operation"])
    op.create_index("idx_audit_log_create_date", "audit_log", ["create_date"])
    op.create_index("idx_audit_log_create_user", "audit_log", ["create_user"])
    op.create_index(
        "idx_audit_log_delta", "audit_log", ["delta"], postgresql_using="gin"
    )


def downgrade():
    # Reverse the above operations

    # Step 4: Drop new indexes
    op.drop_index("idx_audit_log_delta", table_name="audit_log")
    op.drop_index("idx_audit_log_create_user", table_name="audit_log")
    op.drop_index("idx_audit_log_create_date", table_name="audit_log")
    op.drop_index("idx_audit_log_operation", table_name="audit_log")

    # Step 3: Drop unique constraint on 'audit_log_id'
    op.drop_constraint("uq_audit_log_audit_log_id", "audit_log", type_="unique")

    # Step 2: Remove comments
    op.execute("COMMENT ON COLUMN audit_log.update_user IS NULL;")
    op.execute("COMMENT ON COLUMN audit_log.update_date IS NULL;")
    op.execute("COMMENT ON COLUMN audit_log.create_user IS NULL;")
    op.execute("COMMENT ON COLUMN audit_log.create_date IS NULL;")
    op.execute("COMMENT ON COLUMN audit_log.delta IS NULL;")
    op.execute("COMMENT ON COLUMN audit_log.new_values IS NULL;")
    op.execute("COMMENT ON COLUMN audit_log.old_values IS NULL;")
    op.execute("COMMENT ON COLUMN audit_log.row_id IS NULL;")
    op.execute("COMMENT ON COLUMN audit_log.operation IS NULL;")
    op.execute("COMMENT ON COLUMN audit_log.table_name IS NULL;")
    op.execute("COMMENT ON COLUMN audit_log.audit_log_id IS NULL;")
    op.execute("COMMENT ON TABLE audit_log IS NULL;")

    # Step 1: Rename 'audit_log_id' column back to 'id'
    op.alter_column("audit_log", "audit_log_id", new_column_name="id")
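
The GIN index created in Step 4 is what makes field-level searches over the JSONB delta column practical: PostgreSQL can answer @> containment predicates from the index instead of scanning the whole table. A minimal usage sketch follows; it is an assumption, not part of this commit, and the find_field_changes helper and the async SQLAlchemy session it expects are illustrative names:

# Hypothetical query sketch, not part of this commit: a JSONB containment
# search over audit_log.delta that idx_audit_log_delta can serve.
from sqlalchemy import text

async def find_field_changes(session, table_name: str, fragment: str):
    """Return audit entries whose delta contains the given JSON fragment,
    e.g. fragment = '{"status": "Approved"}'."""
    result = await session.execute(
        text(
            "SELECT audit_log_id, operation, create_user, create_date "
            "FROM audit_log "
            "WHERE table_name = :table_name "
            "AND delta @> CAST(:fragment AS JSONB) "
            "ORDER BY create_date DESC"
        ),
        {"table_name": table_name, "fragment": fragment},
    )
    return result.all()

Equality filters on operation, create_date, and create_user are covered by the plain B-tree indexes created in the same step.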
63 changes: 52 additions & 11 deletions backend/lcfs/db/models/audit/AuditLog.py
@@ -1,23 +1,64 @@
from lcfs.db.base import Auditable, BaseModel
from sqlalchemy import (
    Integer,
    Column,
    Text,
)
from sqlalchemy.dialects.postgresql import JSONB


class AuditLog(BaseModel, Auditable):
    """
    Audit log capturing changes to database tables.

    As the table grows, consider implementing automatic archiving (e.g., moving older logs to an archive table)
    and purging (e.g., deleting logs after a retention period) using tools like `pg_cron` or external schedulers.

    Archiving:
    - Create an `audit_log_archive` table with the same structure as `audit_log`.
    - Use a scheduled job (e.g., with `pg_cron`) to move records older than a certain threshold (e.g., 1 month) from `audit_log` to `audit_log_archive`.
    - Alternatively, consider creating date-based archive tables (e.g., audit_log_archive_2025_01) to organize logs by time periods.

    Purging:
    - Use a scheduled job (e.g., with `pg_cron`) to delete records older than a defined retention period (e.g., 1 year) from `audit_log_archive`.
    """

    __tablename__ = "audit_log"
    __table_args__ = {"comment": "Track changes in defined tables."}

    audit_log_id = Column(
        Integer,
        primary_key=True,
        autoincrement=True,
        comment="Unique identifier for each audit log entry.",
    )
    table_name = Column(
        Text,
        nullable=False,
        comment="Name of the table where the action occurred.",
    )
    operation = Column(
        Text,
        nullable=False,
        comment="Type of operation: 'INSERT', 'UPDATE', or 'DELETE'.",
    )
    row_id = Column(
        JSONB,
        nullable=False,
        comment="Primary key of the affected row, stored as JSONB to support composite keys.",
    )
    old_values = Column(
        JSONB,
        nullable=True,
        comment="Previous values before the operation.",
    )
    new_values = Column(
        JSONB,
        nullable=True,
        comment="New values after the operation.",
    )
    delta = Column(
        JSONB,
        nullable=True,
        comment="JSONB delta of the changes.",
    )
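
The docstring above suggests `pg_cron` for archiving and purging, but this commit does not implement either job. A minimal sketch of what those scheduled jobs could look like, under stated assumptions: the pg_cron extension is installed in the target database, and the job names, schedules, and the audit_log_archive table are illustrative, not part of this commit:

# Hypothetical maintenance sketch, not part of this commit; could live in a
# later Alembic migration's upgrade(). Requires the pg_cron extension.
from alembic import op

def schedule_audit_log_maintenance():
    # Archive table mirroring audit_log (illustrative name).
    op.execute(
        "CREATE TABLE IF NOT EXISTS audit_log_archive (LIKE audit_log INCLUDING ALL);"
    )
    # Nightly at 02:00: move rows older than 1 month into the archive.
    op.execute(
        """
        SELECT cron.schedule('archive_audit_log', '0 2 * * *', $$
            WITH moved AS (
                DELETE FROM audit_log
                WHERE create_date < now() - INTERVAL '1 month'
                RETURNING *
            )
            INSERT INTO audit_log_archive SELECT * FROM moved;
        $$);
        """
    )
    # Weekly on Sunday at 03:00: purge archived rows past a 1-year retention.
    op.execute(
        """
        SELECT cron.schedule('purge_audit_log_archive', '0 3 * * 0', $$
            DELETE FROM audit_log_archive
            WHERE create_date < now() - INTERVAL '1 year';
        $$);
        """
    )

Feeding a DELETE ... RETURNING into the INSERT through a data-modifying CTE makes each archive pass a single atomic statement, so a row is never visible in both tables at once.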
39 changes: 0 additions & 39 deletions backend/lcfs/tests/audit_log/conftest.py

This file was deleted.

150 changes: 0 additions & 150 deletions backend/lcfs/tests/audit_log/test_audit_log.py

This file was deleted.

(Diffs for the remaining 23 changed files are not shown.)
