From 03d4c2d190f770bbc21c98fd98923de8680d2c96 Mon Sep 17 00:00:00 2001 From: Andy Salnikov Date: Mon, 15 Apr 2024 20:01:14 -0700 Subject: [PATCH] Add trivial unit test for database instantiation and querying empty chunks table. --- .github/workflows/build.yaml | 67 +++++ python/lsst/__init__.py | 24 ++ python/lsst/dax/__init__.py | 24 ++ python/lsst/dax/ppdb/__init__.py | 1 + python/lsst/dax/ppdb/sql/_ppdb_sql.py | 111 ++++++--- python/lsst/dax/ppdb/tests/__init__.py | 22 ++ python/lsst/dax/ppdb/tests/_ppdb.py | 65 +++++ tests/config/schema.yaml | 327 +++++++++++++++++++++++++ tests/test_ppdbSql.py | 98 ++++++++ 9 files changed, 704 insertions(+), 35 deletions(-) create mode 100644 .github/workflows/build.yaml create mode 100644 python/lsst/__init__.py create mode 100644 python/lsst/dax/__init__.py create mode 100644 python/lsst/dax/ppdb/tests/__init__.py create mode 100644 python/lsst/dax/ppdb/tests/_ppdb.py create mode 100644 tests/config/schema.yaml create mode 100644 tests/test_ppdbSql.py diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml new file mode 100644 index 0000000..7feeb2f --- /dev/null +++ b/.github/workflows/build.yaml @@ -0,0 +1,67 @@ +name: build_and_test + +on: + push: + branches: + - main + pull_request: + +jobs: + build_and_test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.11", "3.12"] + + steps: + - uses: actions/checkout@v3 + with: + # Need to clone everything for the git tags. + fetch-depth: 0 + + - uses: conda-incubator/setup-miniconda@v2 + with: + python-version: ${{ matrix.python-version }} + channels: conda-forge,defaults + channel-priority: strict + show-channel-urls: true + miniforge-variant: Mambaforge + use-mamba: true + + - name: Update pip/wheel infrastructure + shell: bash -l {0} + run: | + mamba install -y -q pip wheel + pip install uv + + - name: Install dependencies + shell: bash -l {0} + run: | + uv pip install -r requirements.txt + + # We have two cores so we can speed up the testing with xdist + - name: Install pytest packages + shell: bash -l {0} + run: | + uv pip install \ + pytest pytest-xdist pytest-cov + + - name: List installed packages + shell: bash -l {0} + run: | + conda list + pip list -v + + - name: Build and install + shell: bash -l {0} + run: | + uv pip install -v --no-deps -e . + + - name: Run tests + shell: bash -l {0} + run: | + pytest -r a -v -n 3 --cov=lsst.dax.ppdb --cov=tests --cov-report=xml --cov-report=term --cov-branch + - name: Upload coverage to codecov + uses: codecov/codecov-action@v2 + with: + file: ./coverage.xml diff --git a/python/lsst/__init__.py b/python/lsst/__init__.py new file mode 100644 index 0000000..4ace447 --- /dev/null +++ b/python/lsst/__init__.py @@ -0,0 +1,24 @@ +# This file is part of dax_ppdb. +# +# Developed for the LSST Data Management System. +# This product includes software developed by the LSST Project +# (https://www.lsst.org). +# See the COPYRIGHT file at the top-level directory of this distribution +# for details of code ownership. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import pkgutil + +__path__ = pkgutil.extend_path(__path__, __name__) diff --git a/python/lsst/dax/__init__.py b/python/lsst/dax/__init__.py new file mode 100644 index 0000000..4ace447 --- /dev/null +++ b/python/lsst/dax/__init__.py @@ -0,0 +1,24 @@ +# This file is part of dax_ppdb. +# +# Developed for the LSST Data Management System. +# This product includes software developed by the LSST Project +# (https://www.lsst.org). +# See the COPYRIGHT file at the top-level directory of this distribution +# for details of code ownership. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import pkgutil + +__path__ = pkgutil.extend_path(__path__, __name__) diff --git a/python/lsst/dax/ppdb/__init__.py b/python/lsst/dax/ppdb/__init__.py index 2cb6bd2..4068017 100644 --- a/python/lsst/dax/ppdb/__init__.py +++ b/python/lsst/dax/ppdb/__init__.py @@ -19,5 +19,6 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . +from .config import * from .ppdb import * from .version import * # Generated by sconsUtils diff --git a/python/lsst/dax/ppdb/sql/_ppdb_sql.py b/python/lsst/dax/ppdb/sql/_ppdb_sql.py index 590fae6..be369a9 100644 --- a/python/lsst/dax/ppdb/sql/_ppdb_sql.py +++ b/python/lsst/dax/ppdb/sql/_ppdb_sql.py @@ -31,13 +31,18 @@ from typing import Any import astropy.time +import felis.datamodel import sqlalchemy import yaml -from felis.datamodel import Schema, SchemaVersion -from felis.metadata import MetaDataBuilder -from lsst.dax.apdb import ApdbMetadata, ApdbTableData, IncompatibleVersionError, ReplicaChunk, VersionTuple -from lsst.dax.apdb.sql.apdbMetadataSql import ApdbMetadataSql -from lsst.dax.apdb.sql.apdbSqlSchema import GUID +from lsst.dax.apdb import ( + ApdbMetadata, + ApdbTableData, + IncompatibleVersionError, + ReplicaChunk, + VersionTuple, + schema_model, +) +from lsst.dax.apdb.sql import ApdbMetadataSql, ModelToSql from lsst.resources import ResourcePath from lsst.utils.iteration import chunk_iterable from sqlalchemy import sql @@ -127,13 +132,13 @@ def __init__(self, config: PpdbConfig): def init_database( cls, db_url: str, - schema_name: str | None, - schema_file: str | None, - felis_schema: str | None, - use_connection_pool: bool, - isolation_level: str | None, - connection_timeout: float | None, - drop: bool, + schema_file: str | None = None, + schema_name: str | None = None, + felis_schema: str | None = None, + use_connection_pool: bool = True, + isolation_level: str | None = None, + connection_timeout: float | None = None, + drop: bool = False, ) -> PpdbConfig: """Initialize PPDB database. 
@@ -216,40 +221,76 @@ def _read_schema( table for table in schema_dict["tables"] if table["name"] not in ("DiaObjectLast",) ] schema_dict["tables"] = filtered_tables - schema = Schema.model_validate(schema_dict) + dm_schema = felis.datamodel.Schema.model_validate(schema_dict) + schema = schema_model.Schema.from_felis(dm_schema) - # Replace schema name with a configured one, this helps in case we - # want to use default schema on database side. + # Replace schema name with a configured one, just in case it may be + # used by someone. if schema_name: schema.name = schema_name - metadata = MetaDataBuilder(schema).build() - else: - builder = MetaDataBuilder(schema, apply_schema_to_metadata=False, apply_schema_to_tables=False) - metadata = builder.build() - # Add table for replication support. - sqlalchemy.schema.Table( - "PpdbReplicaChunk", - metadata, - sqlalchemy.schema.Column( - "apdb_replica_chunk", sqlalchemy.BigInteger, primary_key=True, autoincrement=False + # Add replica chunk table. + table_name = "PpdbReplicaChunk" + columns = [ + schema_model.Column( + name="apdb_replica_chunk", + id=f"#{table_name}.apdb_replica_chunk", + datatype=felis.datamodel.DataType.LONG, + ), + schema_model.Column( + name="last_update_time", + id=f"#{table_name}.last_update_time", + datatype=felis.datamodel.DataType.TIMESTAMP, + nullable=False, + ), + schema_model.Column( + name="unique_id", + id=f"#{table_name}.unique_id", + datatype=schema_model.ExtraDataTypes.UUID, + nullable=False, + ), + schema_model.Column( + name="replica_time", + id=f"#{table_name}.replica_time", + datatype=felis.datamodel.DataType.TIMESTAMP, + nullable=False, + ), + ] + indices = [ + schema_model.Index( + name="PpdbInsertId_idx_last_update_time", + id="#PpdbInsertId_idx_last_update_time", + columns=[columns[1]], + ), + schema_model.Index( + name="PpdbInsertId_idx_replica_time", + id="#PpdbInsertId_idx_replica_time", + columns=[columns[3]], ), - sqlalchemy.schema.Column("last_update_time", sqlalchemy.types.TIMESTAMP, nullable=False), - sqlalchemy.schema.Column("unique_id", GUID, nullable=False), - sqlalchemy.schema.Column("replica_time", sqlalchemy.types.TIMESTAMP, nullable=False), - sqlalchemy.schema.Index("PpdbInsertId_idx_last_update_time", "last_update_time"), - sqlalchemy.schema.Index("PpdbInsertId_idx_replica_time", "replica_time"), - schema=schema_name, + ] + + # Add table for replication support. + chunks_table = schema_model.Table( + name=table_name, + id=f"#{table_name}", + columns=columns, + primary_key=[columns[0]], + indexes=indices, + constraints=[], ) + schema.tables.append(chunks_table) - if isinstance(schema.version, str): - version = VersionTuple.fromString(schema.version) - elif isinstance(schema.version, SchemaVersion): + if schema.version is not None: version = VersionTuple.fromString(schema.version.current) else: # Missing schema version is identical to 0.1.0 version = VersionTuple(0, 1, 0) + metadata = sqlalchemy.schema.MetaData(schema=schema_name) + + converter = ModelToSql(metadata=metadata) + converter.make_tables(schema.tables) + return metadata, version @classmethod @@ -322,7 +363,7 @@ def _make_engine(cls, config: PpdbSqlConfig) -> sqlalchemy.engine.Engine: kw["poolclass"] = NullPool if config.isolation_level is not None: kw.update(isolation_level=config.isolation_level) - elif config.db_url.startswith("sqlite"): # type: ignore + elif config.db_url.startswith("sqlite"): # Use READ_UNCOMMITTED as default value for sqlite. 
             kw.update(isolation_level="READ_UNCOMMITTED")
         if config.connection_timeout is not None:
diff --git a/python/lsst/dax/ppdb/tests/__init__.py b/python/lsst/dax/ppdb/tests/__init__.py
new file mode 100644
index 0000000..80981ba
--- /dev/null
+++ b/python/lsst/dax/ppdb/tests/__init__.py
@@ -0,0 +1,22 @@
+# This file is part of dax_ppdb.
+#
+# Developed for the LSST Data Management System.
+# This product includes software developed by the LSST Project
+# (http://www.lsst.org).
+# See the COPYRIGHT file at the top-level directory of this distribution
+# for details of code ownership.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+from ._ppdb import *
diff --git a/python/lsst/dax/ppdb/tests/_ppdb.py b/python/lsst/dax/ppdb/tests/_ppdb.py
new file mode 100644
index 0000000..a964690
--- /dev/null
+++ b/python/lsst/dax/ppdb/tests/_ppdb.py
@@ -0,0 +1,65 @@
+# This file is part of dax_ppdb.
+#
+# Developed for the LSST Data Management System.
+# This product includes software developed by the LSST Project
+# (http://www.lsst.org).
+# See the COPYRIGHT file at the top-level directory of this distribution
+# for details of code ownership.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+from __future__ import annotations
+
+__all__ = ["PpdbTest"]
+
+import unittest
+from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING, Any
+
+from ..config import PpdbConfig
+from ..ppdb import Ppdb
+
+if TYPE_CHECKING:
+
+    class TestCaseMixin(unittest.TestCase):
+        """Base class for mixin test classes that use TestCase methods."""
+
+else:
+
+    class TestCaseMixin:
+        """Do-nothing definition of mixin base class for regular execution."""
+
+
+class PpdbTest(TestCaseMixin, ABC):
+    """Base class for Ppdb tests that can be specialized for a concrete
+    implementation.
+
+    This can only be used as a mixin class with a unittest.TestCase because
+    it calls various assert methods.
+    """
+
+    @abstractmethod
+    def make_instance(self, **kwargs: Any) -> PpdbConfig:
+        """Make a database instance and return its configuration."""
+        raise NotImplementedError()
+
+    def test_empty_db(self) -> None:
+        """Test instantiating a database and running queries against an
+        empty database.
+ """ + config = self.make_instance() + ppdb = Ppdb.from_config(config) + chunks = ppdb.get_replica_chunks() + if chunks is not None: + self.assertEqual(len(chunks), 0) diff --git a/tests/config/schema.yaml b/tests/config/schema.yaml new file mode 100644 index 0000000..12b1eb2 --- /dev/null +++ b/tests/config/schema.yaml @@ -0,0 +1,327 @@ +--- +name: "ApdbSchema" +"@id": "#apdbSchema" +version: "0.1.1" +tables: +- name: metadata + "@id": "#metadata" + description: Table containing various metadata key/value pairs for APDB. + columns: + - name: name + "@id": "#metadata.name" + datatype: text + length: 1024 + nullable: false + description: Name or key for a metadata item. + - name: value + "@id": "#metadata.value" + datatype: text + length: 65535 + nullable: false + description: Content of the metadata item in string representation. + primaryKey: "#metadata.name" +- name: DiaObject + "@id": "#DiaObject" + description: The DiaObject table contains descriptions of the astronomical objects + detected on one or more difference images. + columns: + - name: diaObjectId + "@id": "#DiaObject.diaObjectId" + datatype: long + nullable: false + description: Unique id. + mysql:datatype: BIGINT + ivoa:ucd: meta.id;src + - name: validityStart + "@id": "#DiaObject.validityStart" + datatype: timestamp + length: 6 + nullable: false + description: Time when validity of this diaObject starts. + mysql:datatype: DATETIME + - name: validityEnd + "@id": "#DiaObject.validityEnd" + datatype: timestamp + length: 6 + nullable: true + description: Time when validity of this diaObject ends. + mysql:datatype: DATETIME + - name: ra + "@id": "#DiaObject.ra" + datatype: double + nullable: false + description: RA-coordinate of the position of the object at time radecTai. + mysql:datatype: DOUBLE + fits:tunit: deg + ivoa:ucd: pos.eq.ra + - name: dec + "@id": "#DiaObject.dec" + datatype: double + nullable: false + description: Dec-coordinate of the position of the object at time radecTai. + mysql:datatype: DOUBLE + fits:tunit: deg + ivoa:ucd: pos.eq.dec + - name: parallax + "@id": "#DiaObject.parallax" + datatype: float + description: Parallax. + mysql:datatype: FLOAT + - name: lastNonForcedSource + "@id": "#DiaObject.lastNonForcedSource" + datatype: timestamp + length: 6 + nullable: false + description: Last time when non-forced DIASource was seen for this object. + mysql:datatype: DATETIME + - name: nDiaSources + "@id": "#DiaObject.nDiaSources" + datatype: int + nullable: false + description: Total number of DiaSources associated with this DiaObject. + primaryKey: + - "#DiaObject.diaObjectId" + - "#DiaObject.validityStart" + indexes: + - name: IDX_DiaObject_validityStart + "@id": "#IDX_DiaObject_validityStart" + columns: + - "#DiaObject.validityStart" +- name: SSObject + "@id": "#SSObject" + description: The SSObject table contains description of the Solar System (moving) + Objects. + columns: + - name: ssObjectId + "@id": "#SSObject.ssObjectId" + datatype: long + nullable: false + description: Unique identifier. + mysql:datatype: BIGINT + ivoa:ucd: meta.id;src + - name: arc + "@id": "#SSObject.arc" + datatype: float + description: Arc of observation. + mysql:datatype: FLOAT + fits:tunit: day + - name: flags + "@id": "#SSObject.flags" + datatype: long + nullable: false + description: Flags, bitwise OR tbd. 
+ value: 0 + mysql:datatype: BIGINT + ivoa:ucd: meta.code + primaryKey: "#SSObject.ssObjectId" +- name: DiaSource + "@id": "#DiaSource" + description: Table to store 'difference image sources'; - sources detected at + SNR >=5 on difference images. + columns: + - name: diaSourceId + "@id": "#DiaSource.diaSourceId" + datatype: long + nullable: false + description: Unique id. + mysql:datatype: BIGINT + ivoa:ucd: meta.id;obs.image + - name: ccdVisitId + "@id": "#DiaSource.ccdVisitId" + datatype: long + nullable: false + description: Id of the ccdVisit where this diaSource was measured. Note that we + are allowing a diaSource to belong to multiple amplifiers, but it may not span + multiple ccds. + mysql:datatype: BIGINT + ivoa:ucd: meta.id;obs.image + - name: diaObjectId + "@id": "#DiaSource.diaObjectId" + datatype: long + nullable: true + description: Id of the diaObject this source was associated with, if any. If not, + it is set to NULL (each diaSource will be associated with either a diaObject + or ssObject). + mysql:datatype: BIGINT + ivoa:ucd: meta.id;src + - name: ssObjectId + "@id": "#DiaSource.ssObjectId" + datatype: long + nullable: true + description: Id of the ssObject this source was associated with, if any. If not, + it is set to NULL (each diaSource will be associated with either a diaObject + or ssObject). + mysql:datatype: BIGINT + ivoa:ucd: meta.id;src + - name: parentDiaSourceId + "@id": "#DiaSource.parentDiaSourceId" + datatype: long + description: Id of the parent diaSource this diaSource has been deblended from, + if any. + mysql:datatype: BIGINT + ivoa:ucd: meta.id;src + - name: ra + "@id": "#DiaSource.ra" + datatype: double + nullable: false + description: RA-coordinate of the center of this diaSource. + mysql:datatype: DOUBLE + fits:tunit: deg + ivoa:ucd: pos.eq.ra + - name: dec + "@id": "#DiaSource.dec" + datatype: double + nullable: false + description: " Decl-coordinate of the center of this diaSource." + mysql:datatype: DOUBLE + fits:tunit: deg + ivoa:ucd: pos.eq.dec + - name: ssObjectReassocTime + "@id": "#DiaSource.ssObjectReassocTime" + datatype: timestamp + length: 6 + description: Time when this diaSource was reassociated from diaObject to ssObject + (if such reassociation happens, otherwise NULL). + mysql:datatype: DATETIME + - name: midpointMjdTai + "@id": "#DiaSource.midpointMjdTai" + datatype: double + nullable: false + description: Effective mid-exposure time for this diaSource. + mysql:datatype: DOUBLE + fits:tunit: d + ivoa:ucd: time.epoch + - name: flags + "@id": "#DiaSource.flags" + datatype: long + nullable: false + description: Flags, bitwise OR tbd. + value: 0 + mysql:datatype: BIGINT + ivoa:ucd: meta.code + primaryKey: "#DiaSource.diaSourceId" + indexes: + - name: IDX_DiaSource_ccdVisitId + "@id": "#IDX_DiaSource_ccdVisitId" + columns: + - "#DiaSource.ccdVisitId" + - name: IDX_DiaSource_diaObjectId + "@id": "#IDX_DiaSource_diaObjectId" + columns: + - "#DiaSource.diaObjectId" + - name: IDX_DiaSource_ssObjectId + "@id": "#IDX_DiaSource_ssObjectId" + columns: + - "#DiaSource.ssObjectId" +- name: DiaForcedSource + "@id": "#DiaForcedSource" + description: Forced-photometry source measurement on an individual difference Exposure + based on a Multifit shape model derived from a deep detection. 
+ columns: + - name: diaObjectId + "@id": "#DiaForcedSource.diaObjectId" + datatype: long + nullable: false + mysql:datatype: BIGINT + ivoa:ucd: meta.id;src + - name: ccdVisitId + "@id": "#DiaForcedSource.ccdVisitId" + datatype: long + nullable: false + description: Id of the visit where this forcedSource was measured. Note that we + are allowing a forcedSource to belong to multiple amplifiers, but it may not + span multiple ccds. + mysql:datatype: BIGINT + ivoa:ucd: meta.id;obs.image + - name: midpointMjdTai + "@id": "#DiaForcedSource.midpointMjdTai" + datatype: double + nullable: false + description: Effective mid-exposure time for this diaForcedSource + ivoa:ucd: time.epoch + fits:tunit: d + - name: flags + "@id": "#DiaForcedSource.flags" + datatype: long + nullable: false + description: Flags, bitwise OR tbd + value: 0 + mysql:datatype: BIGINT + ivoa:ucd: meta.code + primaryKey: + - "#DiaForcedSource.diaObjectId" + - "#DiaForcedSource.ccdVisitId" + indexes: + - name: IDX_DiaForcedSource_ccdVisitId + "@id": "#IDX_DiaForcedSource_ccdVisitId" + columns: + - "#DiaForcedSource.ccdVisitId" +- name: DiaObject_To_Object_Match + "@id": "#DiaObject_To_Object_Match" + description: The table stores mapping of diaObjects to the nearby objects. + columns: + - name: diaObjectId + "@id": "#DiaObject_To_Object_Match.diaObjectId" + datatype: long + nullable: false + description: Id of diaObject. + mysql:datatype: BIGINT + ivoa:ucd: meta.id;src + - name: objectId + "@id": "#DiaObject_To_Object_Match.objectId" + datatype: long + nullable: false + description: Id of a nearby object. + mysql:datatype: BIGINT + ivoa:ucd: meta.id;src + - name: dist + "@id": "#DiaObject_To_Object_Match.dist" + datatype: float + description: The distance between the diaObject and the object. + mysql:datatype: FLOAT + fits:tunit: arcsec + indexes: + - name: IDX_DiaObjectToObjectMatch_diaObjectId + "@id": "#IDX_DiaObjectToObjectMatch_diaObjectId" + columns: + - "#DiaObject_To_Object_Match.diaObjectId" + - name: IDX_DiaObjectToObjectMatch_objectId + "@id": "#IDX_DiaObjectToObjectMatch_objectId" + columns: + - "#DiaObject_To_Object_Match.objectId" +- name: DiaObjectLast + "@id": "#DiaObjectLast" + columns: + - name: diaObjectId + "@id": "#DiaObjectLast.diaObjectId" + datatype: long + nullable: false + description: Unique id. + ivoa:ucd: meta.id;src + - name: ra + "@id": "#DiaObjectLast.ra" + datatype: double + nullable: false + description: RA-coordinate of the position of the object at time radecTai. + ivoa:ucd: pos.eq.ra + fits:tunit: deg + - name: dec + "@id": "#DiaObjectLast.dec" + datatype: double + nullable: false + description: Decl-coordinate of the position of the object at time radecTai. + ivoa:ucd: pos.eq.dec + fits:tunit: deg + - name: lastNonForcedSource + "@id": "#DiaObjectLast.lastNonForcedSource" + datatype: timestamp + length: 6 + nullable: false + description: Last time when non-forced DIASource was seen for this object. + mysql:datatype: DATETIME + - name: nDiaSources + "@id": "#DiaObjectLast.nDiaSources" + datatype: int + nullable: false + description: Total number of DiaSources associated with this DiaObject. + primaryKey: "#DiaObjectLast.diaObjectId" diff --git a/tests/test_ppdbSql.py b/tests/test_ppdbSql.py new file mode 100644 index 0000000..e03877e --- /dev/null +++ b/tests/test_ppdbSql.py @@ -0,0 +1,98 @@ +# This file is part of dax_ppdb. +# +# Developed for the LSST Data Management System. +# This product includes software developed by the LSST Project +# (http://www.lsst.org). 
+# See the COPYRIGHT file at the top-level directory of this distribution
+# for details of code ownership.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+import gc
+import os
+import shutil
+import tempfile
+import unittest
+from typing import Any
+
+from lsst.dax.ppdb import PpdbConfig
+from lsst.dax.ppdb.sql import PpdbSql
+from lsst.dax.ppdb.tests import PpdbTest
+
+try:
+    import testing.postgresql
+except ImportError:
+    testing = None
+
+TEST_SCHEMA = os.path.join(os.path.abspath(os.path.dirname(__file__)), "config/schema.yaml")
+
+
+class ApdbSQLiteTestCase(PpdbTest, unittest.TestCase):
+    """A test case for PpdbSql class using SQLite backend."""
+
+    def setUp(self) -> None:
+        self.tempdir = tempfile.mkdtemp()
+        self.db_url = f"sqlite:///{self.tempdir}/apdb.sqlite3"
+
+    def tearDown(self) -> None:
+        shutil.rmtree(self.tempdir, ignore_errors=True)
+
+    def make_instance(self, **kwargs: Any) -> PpdbConfig:
+        """Make config class instance used in all tests."""
+        return PpdbSql.init_database(db_url=self.db_url, schema_file=TEST_SCHEMA, **kwargs)
+
+
+@unittest.skipUnless(testing is not None, "testing.postgresql module not found")
+class ApdbPostgresTestCase(PpdbTest, unittest.TestCase):
+    """A test case for PpdbSql class using Postgres backend."""
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        # Create the factory that makes a postgres server for each test.
+        cls.postgresql = testing.postgresql.PostgresqlFactory(cache_initialized_db=True)
+        super().setUpClass()
+
+    @classmethod
+    def tearDownClass(cls) -> None:
+        # Clean up any lingering SQLAlchemy engines/connections
+        # so they're closed before we shut down the server.
+        gc.collect()
+        cls.postgresql.clear_cache()
+        super().tearDownClass()
+
+    def setUp(self) -> None:
+        self.server = self.postgresql()
+
+    def tearDown(self) -> None:
+        self.server.stop()
+
+    def make_instance(self, **kwargs: Any) -> PpdbConfig:
+        """Make config class instance used in all tests."""
+        return PpdbSql.init_database(db_url=self.server.url(), schema_file=TEST_SCHEMA, **kwargs)
+
+
+@unittest.skipUnless(testing is not None, "testing.postgresql module not found")
+class ApdbPostgresNamespaceTestCase(ApdbPostgresTestCase):
+    """A test case for PpdbSql class using Postgres backend with schema name."""
+
+    # Use a non-default schema (namespace) name.
+    schema_name = "test_schema001"
+
+    def make_instance(self, **kwargs: Any) -> PpdbConfig:
+        """Make config class instance used in all tests."""
+        return super().make_instance(schema_name=self.schema_name, **kwargs)
+
+
+if __name__ == "__main__":
+    unittest.main()