Fixes 17413: Fix one-sided tests for columnValueLengthsToBeBetween and columnValuesToBeBetween (open-metadata#17423)

* mysql integration tests
* fix(data-quality): accept between with no bounds: add between filters only when the bounds are defined; if they are not (i.e. they resolve to 'inf' values), do not add any filters
* format
* consolidated ingestion_config
* format
* fixed handling of date and time columns
* fixed tests
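To make the behavioral change concrete, here is a minimal sketch of the bound handling. It is not the actual OpenMetadata validator code; the function name build_between_filters and its parameters are hypothetical. The point is that a one-sided test supplies only one bound, and an undefined bound (which resolves to an infinite value) must not add any filter:

import math
from typing import List, Optional

from sqlalchemy import Column


def build_between_filters(
    column: Column,
    min_bound: Optional[float] = None,
    max_bound: Optional[float] = None,
) -> List:
    """Return SQLAlchemy filters for a 'between' test, skipping undefined bounds."""
    filters = []
    # Only filter on a bound that is defined and finite; a missing bound
    # (None or +/-inf) contributes no predicate at all.
    if min_bound is not None and not math.isinf(min_bound):
        filters.append(column >= min_bound)
    if max_bound is not None and not math.isinf(max_bound):
        filters.append(column <= max_bound)
    return filters

The one-sided test cases added below, such as value_lengths_at_least_3 and id_at_least_0, exercise exactly this path.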
Showing 15 changed files with 415 additions and 71 deletions.
Empty file.
@@ -0,0 +1,73 @@
import os
from subprocess import CalledProcessError

import pytest
from sqlalchemy import create_engine
from testcontainers.mysql import MySqlContainer

from _openmetadata_testutils.helpers.docker import try_bind
from metadata.generated.schema.api.services.createDatabaseService import (
    CreateDatabaseServiceRequest,
)
from metadata.generated.schema.entity.services.databaseService import (
    DatabaseServiceType,
)


@pytest.fixture(scope="module")
def mysql_container(tmp_path_factory):
    """Start a MySQL container loaded with the employees test database."""
    test_db_tar_path = os.path.join(
        os.path.dirname(__file__), "data", "mysql", "test_db-1.0.7.tar.gz"
    )
    container = MySqlContainer(dbname="employees")
    with (
        try_bind(container, 3306, 3307) if not os.getenv("CI") else container
    ) as container:
        docker_container = container.get_wrapped_container()
        # Copy the employees dump into the container and load it as root
        docker_container.exec_run(["mkdir", "-p", "/data"])
        docker_container.put_archive("/data", open(test_db_tar_path, "rb"))
        for command in (
            [
                "sh",
                "-c",
                f"cd /data/test_db && mysql -uroot -p{container.password} < employees.sql",
            ],
            [
                "sh",
                "-c",
                f'mysql -uroot -p{container.password} -e \'GRANT SELECT ON employees.* TO "test"@"%";\'',
            ],
        ):
            res = docker_container.exec_run(command)
            if res[0] != 0:
                raise CalledProcessError(
                    returncode=res[0], cmd=command, output=res[1].decode("utf-8")
                )
        # Add a timestamp column so the date/time test cases have data to validate
        engine = create_engine(container.get_connection_url())
        engine.execute(
            "ALTER TABLE employees ADD COLUMN last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP"
        )
        engine.execute(
            "UPDATE employees SET last_update = hire_date + INTERVAL FLOOR(1 + RAND() * 500000) SECOND"
        )
        yield container


@pytest.fixture(scope="module")
def create_service_request(mysql_container, tmp_path_factory):
    return CreateDatabaseServiceRequest.model_validate(
        {
            "name": "docker_test_" + tmp_path_factory.mktemp("mysql").name,
            "serviceType": DatabaseServiceType.Mysql.value,
            "connection": {
                "config": {
                    "username": mysql_container.username,
                    "authType": {"password": mysql_container.password},
                    "hostPort": "localhost:"
                    + mysql_container.get_exposed_port(mysql_container.port),
                    "databaseSchema": mysql_container.dbname,
                }
            },
        }
    )
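As a usage note, the create_service_request fixture above is typically consumed by a shared db_service fixture that registers the service through the OpenMetadata client. A minimal sketch of that step, assuming an already-configured client; the helper name register_mysql_service and its wiring are illustrative, not part of this commit:

from metadata.generated.schema.api.services.createDatabaseService import (
    CreateDatabaseServiceRequest,
)
from metadata.generated.schema.entity.services.databaseService import DatabaseService
from metadata.ingestion.ometa.ometa_api import OpenMetadata


def register_mysql_service(
    metadata: OpenMetadata, request: CreateDatabaseServiceRequest
) -> DatabaseService:
    # create_or_update registers the service if it does not exist yet and
    # returns the DatabaseService entity, whose fullyQualifiedName is what
    # the data-quality tests below format into their entity FQNs.
    return metadata.create_or_update(data=request)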
@@ -0,0 +1,3 @@
# MySQL test db

https://github.com/datacharmer/test_db
Binary file not shown.
@@ -0,0 +1,222 @@
import sys
from dataclasses import dataclass
from datetime import datetime
from typing import List

import pytest

from _openmetadata_testutils.pydantic.test_utils import assert_equal_pydantic_objects
from metadata.data_quality.api.models import TestCaseDefinition
from metadata.generated.schema.entity.services.databaseService import DatabaseService
from metadata.generated.schema.metadataIngestion.testSuitePipeline import (
    TestSuiteConfigType,
    TestSuitePipeline,
)
from metadata.generated.schema.tests.basic import TestCaseResult, TestCaseStatus
from metadata.generated.schema.tests.testCase import TestCase
from metadata.ingestion.ometa.ometa_api import OpenMetadata
from metadata.workflow.data_quality import TestSuiteWorkflow
from metadata.workflow.metadata import MetadataWorkflow

if sys.version_info < (3, 9):
    pytest.skip("requires python 3.9+", allow_module_level=True)


@pytest.fixture()
def get_test_suite_config(workflow_config, sink_config):
    def inner(entity_fqn: str, test_case_definitions: List[TestCaseDefinition]):
        return {
            "source": {
                "type": TestSuiteConfigType.TestSuite.value,
                "serviceName": "MyTestSuite",
                "sourceConfig": {
                    "config": TestSuitePipeline(
                        type=TestSuiteConfigType.TestSuite,
                        entityFullyQualifiedName=entity_fqn,
                    )
                },
            },
            "processor": {
                "type": "orm-test-runner",
                "config": {
                    "testCases": [obj.model_dump() for obj in test_case_definitions]
                },
            },
            "sink": sink_config,
            "workflowConfig": workflow_config,
        }

    return inner


@dataclass
class TestColumnParameter:
    entity_fqn: str
    test_case_definition: TestCaseDefinition
    expected_result: TestCaseResult


@pytest.fixture(
    params=[
        TestColumnParameter(
            entity_fqn="{database_service_fqn}.default.employees.employees",
            test_case_definition=TestCaseDefinition(
                name="first_name_includes_tom_and_jerry_wo_enum",
                testDefinitionName="columnValuesToBeInSet",
                computePassedFailedRowCount=True,
                columnName="first_name",
                parameterValues=[
                    {"name": "allowedValues", "value": "['Tom', 'Jerry']"}
                ],
            ),
            expected_result=TestCaseResult(
                testCaseStatus=TestCaseStatus.Failed,
            ),
        ),
        TestColumnParameter(
            entity_fqn="{database_service_fqn}.default.employees.employees",
            test_case_definition=TestCaseDefinition(
                name="value_lengths_between_3_and_5",
                testDefinitionName="columnValueLengthsToBeBetween",
                computePassedFailedRowCount=True,
                columnName="first_name",
                parameterValues=[
                    {"name": "minLength", "value": "3"},
                    {"name": "maxLength", "value": "5"},
                ],
            ),
            expected_result=TestCaseResult(
                testCaseStatus=TestCaseStatus.Failed,
            ),
        ),
        TestColumnParameter(
            entity_fqn="{database_service_fqn}.default.employees.employees",
            test_case_definition=TestCaseDefinition(
                name="value_lengths_at_most_5",
                testDefinitionName="columnValueLengthsToBeBetween",
                columnName="first_name",
                computePassedFailedRowCount=True,
                parameterValues=[
                    {"name": "maxLength", "value": "5"},
                ],
            ),
            expected_result=TestCaseResult(
                testCaseStatus=TestCaseStatus.Failed,
            ),
        ),
        TestColumnParameter(
            entity_fqn="{database_service_fqn}.default.employees.employees",
            test_case_definition=TestCaseDefinition(
                name="value_lengths_at_least_3",
                testDefinitionName="columnValueLengthsToBeBetween",
                columnName="first_name",
                computePassedFailedRowCount=True,
                parameterValues=[
                    {"name": "minLength", "value": "3"},
                ],
            ),
            expected_result=TestCaseResult(
                testCaseStatus=TestCaseStatus.Success,
            ),
        ),
        TestColumnParameter(
            entity_fqn="{database_service_fqn}.default.employees.employees",
            test_case_definition=TestCaseDefinition(
                name="id_at_least_0",
                testDefinitionName="columnValuesToBeBetween",
                columnName="emp_no",
                computePassedFailedRowCount=True,
                parameterValues=[
                    {"name": "minValue", "value": "0"},
                ],
            ),
            expected_result=TestCaseResult(
                testCaseStatus=TestCaseStatus.Success,
            ),
        ),
        TestColumnParameter(
            entity_fqn="{database_service_fqn}.default.employees.employees",
            test_case_definition=TestCaseDefinition(
                name="id_no_bounds",
                testDefinitionName="columnValuesToBeBetween",
                columnName="emp_no",
                computePassedFailedRowCount=True,
                parameterValues=[],
            ),
            expected_result=TestCaseResult(
                testCaseStatus=TestCaseStatus.Success,
            ),
        ),
        TestColumnParameter(
            entity_fqn="{database_service_fqn}.default.employees.employees",
            test_case_definition=TestCaseDefinition(
                name="values_between_date",
                testDefinitionName="columnValuesToBeBetween",
                columnName="hire_date",
                computePassedFailedRowCount=True,
                parameterValues=[
                    {
                        "name": "minValue",
                        "value": str(int(datetime(1960, 1, 1).timestamp())),
                    },
                ],
            ),
            expected_result=TestCaseResult(
                testCaseStatus=TestCaseStatus.Success,
            ),
        ),
        TestColumnParameter(
            entity_fqn="{database_service_fqn}.default.employees.employees",
            test_case_definition=TestCaseDefinition(
                name="value_between_timestamp",
                testDefinitionName="columnValuesToBeBetween",
                columnName="last_update",
                computePassedFailedRowCount=True,
                parameterValues=[
                    {
                        "name": "minValue",
                        "value": str(int(datetime(2000, 1, 1).timestamp())),
                    },
                ],
            ),
            expected_result=TestCaseResult(
                testCaseStatus=TestCaseStatus.Failed,
            ),
        ),
    ],
    ids=lambda x: x.test_case_definition.name,
)
def parameters(request, db_service):
    # Inject the real database service FQN into the templated entity FQN
    request.param.entity_fqn = request.param.entity_fqn.format(
        database_service_fqn=db_service.fullyQualifiedName.root
    )
    return request.param


def test_column_test_cases(
    patch_passwords_for_db_services,
    run_workflow,
    ingestion_config,
    db_service: DatabaseService,
    metadata: OpenMetadata,
    parameters: TestColumnParameter,
    get_test_suite_config,
    cleanup_fqns,
):
    # Ingest metadata first, then run the parametrized data-quality test case
    # and compare the stored result against the expected status.
    run_workflow(MetadataWorkflow, ingestion_config)
    test_suite_config = get_test_suite_config(
        parameters.entity_fqn,
        [parameters.test_case_definition],
    )
    run_workflow(TestSuiteWorkflow, test_suite_config)
    test_case: TestCase = metadata.get_by_name(
        TestCase,
        f"{parameters.entity_fqn}.{parameters.test_case_definition.columnName}.{parameters.test_case_definition.name}",
        fields=["*"],
        nullable=False,
    )
    cleanup_fqns(TestCase, test_case.fullyQualifiedName.root)
    assert_equal_pydantic_objects(
        parameters.expected_result,
        test_case.testCaseResult,
    )