Skip to content

Commit

Permalink
test: ensure sequences are compared in exodus tests (refactored sequence dump code for it) (#411)
Browse files Browse the repository at this point in the history
  • Loading branch information
vjeeva authored Mar 1, 2024
1 parent 0309c56 commit c95dc35
Show file tree
Hide file tree
Showing 3 changed files with 49 additions and 12 deletions.
13 changes: 1 addition & 12 deletions pgbelt/cmd/sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,18 +29,7 @@ async def _sync_sequences(
dst_logger: Logger,
) -> None:

# Note: When in an exodus migration with a non-public schema, the sequence names must be prefixed with the schema name.
# This may not be done by the user, so we must do it here.
proper_sequence_names = None
if targeted_sequences is not None:
proper_sequence_names = []
for seq in targeted_sequences:
if f"{schema}." not in seq:
proper_sequence_names.append(f"{schema}.{seq}")
else:
proper_sequence_names.append(seq)

seq_vals = await dump_sequences(src_pool, proper_sequence_names, schema, src_logger)
seq_vals = await dump_sequences(src_pool, targeted_sequences, schema, src_logger)
await load_sequences(dst_pool, seq_vals, dst_logger)


Expand Down
12 changes: 12 additions & 0 deletions pgbelt/util/postgres.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,18 @@ async def dump_sequences(
"""
)

# Note: When in an exodus migration with a non-public schema, the sequence names must be prefixed with the schema name.
# This may not be done by the user, so we must do it here.
proper_sequence_names = None
if targeted_sequences is not None:
proper_sequence_names = []
for seq in targeted_sequences:
if f"{schema}." not in seq:
proper_sequence_names.append(f"{schema}.{seq}")
else:
proper_sequence_names.append(seq)
targeted_sequences = proper_sequence_names

seq_vals = {}
final_seqs = []
# If we get a list of targeted sequences, we only want to dump whichever of those are found in the database and schema.
Expand Down
36 changes: 36 additions & 0 deletions tests/integration/test_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,13 @@
from pgbelt.config.models import DbupgradeConfig

import asyncio
from asyncpg import create_pool

import pgbelt
import pytest

from typer import echo


async def _check_status(
configs: dict[str, DbupgradeConfig], src_dst_status: str, dst_src_status: str
Expand Down Expand Up @@ -316,8 +319,41 @@ async def _ensure_same_data(configs: dict[str, DbupgradeConfig]):
print(
f"Ensuring {setname} source and destination data for table {table} are the same..."
)

assert src_table_data[table] == dst_table_data[table]

# We also need to ensure the sequences are the same
# I'm using the same code as in the sync_sequences function to do this because it has
# all the logic to handle exodus-style migrations and target the right sequences.
src_pool, dst_pool = await asyncio.gather(
create_pool(configs[setname].src.pglogical_uri, min_size=1),
create_pool(configs[setname].dst.root_uri, min_size=1),
)
src_seq_vals = await pgbelt.util.postgres.dump_sequences(
src_pool,
configs[setname].sequences,
configs[setname].schema_name,
pgbelt.util.logs.get_logger(
configs[setname].db,
configs[setname].dc,
"integration-sequences.src",
),
)
dst_seq_vals = await pgbelt.util.postgres.dump_sequences(
dst_pool,
configs[setname].sequences,
configs[setname].schema_name,
pgbelt.util.logs.get_logger(
configs[setname].db,
configs[setname].dc,
"integration-sequences.dst",
),
)

print(
f"Ensuring {setname} source and destination sequences are the same..."
)
assert src_seq_vals == dst_seq_vals
else:
print(f"Ensuring {setname} source and destination dumps are the same...")
assert src_dumps_filtered[i] == dst_dumps_filtered[i]
Expand Down

0 comments on commit c95dc35

Please sign in to comment.