diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
new file mode 100644
index 0000000..1fa545e
--- /dev/null
+++ b/.devcontainer/Dockerfile
@@ -0,0 +1,29 @@
+# Use the official lightweight Python image.
+# https://hub.docker.com/_/python
+FROM python:3.10.13 AS base
+
+# Install system dependencies required for Poetry
+RUN apt-get update \
+    && apt-get install -y curl git \
+    # && apt-get install -y curl build-essential git \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+# Configure the Poetry installation
+ENV POETRY_VERSION=1.7.0 \
+    # Install Poetry globally
+    POETRY_HOME="/usr/local" \
+    POETRY_NO_INTERACTION=1 \
+    # Ensure that the poetry path is in the PATH
+    PATH="/usr/local/bin:$PATH"
+
+# Install Poetry - respects $POETRY_VERSION
+RUN curl -sSL https://install.python-poetry.org | python3 -
+
+# Create a non-root user
+RUN useradd --create-home akm_user
+
+# Switch to the non-root user
+USER akm_user
+
+CMD ["bash"]
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
new file mode 100644
index 0000000..8ef8821
--- /dev/null
+++ b/.devcontainer/devcontainer.json
@@ -0,0 +1,16 @@
+{
+    "name": "akm-develop",
+    // "context" is the path that the Codespaces docker build command should be run from, relative to devcontainer.json
+    "context": "..",
+    "dockerFile": "Dockerfile",
+
+    "customizations": {
+        "vscode": {
+            "extensions": ["ms-python.python", "redhat.vscode-yaml", "ms-vscode.makefile-tools"],
+            "settings": {
+                "terminal.integrated.defaultProfile.linux": "bash"
+            }
+        }
+    },
+    "postCreateCommand": "poetry install && poetry shell"
+}
diff --git a/.github/workflows/run_pytest.yaml b/.github/workflows/run_pytest.yaml
new file mode 100644
index 0000000..f495fee
--- /dev/null
+++ b/.github/workflows/run_pytest.yaml
@@ -0,0 +1,35 @@
+name: Pytest Workflow
+
+# Triggers the workflow on pull request events and on pushes to any branch
+on:
+  pull_request:
+  push:
+    branches:
+      - "**"
+
+jobs:
+  test:
+    name: Run Pytest
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: setup python
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.10.13'
+
+      - name: install poetry
+        run: |
+          curl -sSL https://install.python-poetry.org | python3 -
+
+      - name: install packages
+        run: |
+          poetry config virtualenvs.in-project true
+          poetry install
+
+      - name: run pytest
+        run: |
+          poetry run pytest tests
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..a272ade
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,159 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+*,cover
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+*.log.*
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+#   However, in case of collaboration, if having platform-specific dependencies or dependencies
+#   having no cross-platform support, pipenv may install dependencies that don't work, or not
+#   install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# Shell script unit test framework
+scripts/shunit2
+
+# Sonarqube
+.scannerwork/
+
+# Other
+_deps
+.pytype
+.vagrant
+site/
+.idea
+.env-vlab*
+.benchmarks
+.conan-config
+.vscode
+mosquitto.conf
+
+# local dbs
+/*.db
+
+## local temp files and logs on root level
+/*.xlsx
+/*.sql
+/*.json
+/*.log
+/*.yaml
\ No newline at end of file
diff --git a/README.md b/README.md
index e30adf0..5ae941d 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,18 @@
 # Automotive Knowledge Model (AKM)
+This repo contains the Automotive Knowledge Model (AKM), an open-source data model and metadata catalog for transmitting vehicle signals in a consistent manner. Please see the overview.md file in the documentation folder for more information. That folder also contains markdown files for principles and usage guidelines.
+
+The repo has four subfolders:
+
+- **/schema** contains the JSON Schema file(s) that provide the structure and meaning of the automotive metadata files
+- **/data** contains the JSON documents that contain the actual automotive metadata
+- **/documentation** contains markdown files that explain aspects of the AKM
+- **/rdf** contains a turtle file that expresses the structure and metadata in an ontology
+
+The repo is currently in an alpha release state and should be considered a **DRAFT**. The following work is required to make it generally available:
+
+- The format and structure of the JSON Schema document require testing, restructuring, and other quality reviews.
+- The tooling that converts VSS to other formats must be made available to the AKM. (The AKM tools should be simpler because of the many existing JSON Schema libraries, tooling, etc.)
+- The processing of VSS overlays should be supported.
+- An assessment is needed of how exposing a DAG that is not necessarily the VSS tree would affect the [VISS](https://www.w3.org/TR/viss2-core/).
+- The structure of the data subfolders should be appraised.
+- The generation of JSON data documents from RDF and vice versa should be developed.
\ No newline at end of file
diff --git a/akm_tools/README.md b/akm_tools/README.md
new file mode 100644
index 0000000..fddbfe0
--- /dev/null
+++ b/akm_tools/README.md
@@ -0,0 +1,37 @@
+# AKM Tools
+
+This project provides a set of tools for parsing, validating, and exporting Automotive Knowledge Model (AKM) data. It supports handling JSON files from specified directories, validating them against given schemas, and exporting the validated data into different formats, including JSON and YAML. The functionality is encapsulated in a Python script that can be executed from the command line, offering flexibility for automation and integration into larger systems or workflows.
+
+## Features
+
+- **Data Validation**: Validate the combined data against a provided schema and optional extended schemas.
+- **Data Exporting**: Export the validated data into various formats such as JSON and YAML. Support for GraphQL export is planned but not yet implemented.
+
+
+## Usage
+
+The main functionality is accessed through the command line interface (CLI) provided by `akm_parser.py`. Below are the available options and their descriptions:
+
+### Command Line Arguments
+
+- `-d`, `--model_data_folder`: Specifies the directory containing AKM model data in JSON format. Default is `akm/data`.
+- `-s`, `--schema`: Specifies the schema file against which the data will be validated. Default is `akm/schema/automotive_knowledge_model.json`.
+- `-xs`, `--extended_schema_dir`: Specifies the directory containing extended schema files for validation. Default is `extensions/schema`.
+- `-xd`, `--extended_data_dir`: Specifies the directory containing extended data. Default is `extensions/data`.
+
+- `-e`, `--export_format`: Specifies the format for exporting validated data. Options are `json` and `yaml`; `graphql` is planned but not yet implemented.
+- `-f`, `--export_file_path`: Specifies the path for the export file. Required if `--export_format` is specified.
+
+### Example Commands
+
+Validate data without exporting:
+```
+python akm_tools/akm_parser.py -xd your_extended_data_folder
+```
+
+Export validated data to JSON:
+```
+python akm_tools/akm_parser.py -d your_model_data_folder -e json -f path/to/export.json
+```
+### Logging
+Validation errors are logged to `validation_errors.log`, aiding in troubleshooting and ensuring data quality.
\ No newline at end of file
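The flags above compose in a single run. A sketch of a combined invocation that overrides the default schema and exports YAML (all folder and file names here are placeholders):

```
python akm_tools/akm_parser.py -d your_model_data_folder -s your_schema.json -xs your_extended_schema_folder -xd your_extended_data_folder -e yaml -f path/to/export.yaml
```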
diff --git a/akm_tools/__init__.py b/akm_tools/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/akm_tools/akm_parser.py b/akm_tools/akm_parser.py
new file mode 100644
index 0000000..5dac401
--- /dev/null
+++ b/akm_tools/akm_parser.py
@@ -0,0 +1,115 @@
+import os
+import json
+import argparse
+from akm_tools.validation import AKMDataValidator
+from akm_tools.utils import YamlCustomDumper
+
+
+def parse_data_from_file(file_name):
+    with open(file_name, "r") as f:
+        data = json.load(f)
+    return data
+
+
+def create_and_combine_json_from_a_folder(dir_path):
+    list_to_return = []
+    for root, dirs, files in os.walk(dir_path):
+        for file in files:
+            if file.endswith(".json"):
+                file_path = os.path.join(root, file)
+                list_to_return = list_to_return + parse_data_from_file(file_path)
+    return list_to_return
+
+
+def parse_and_validate_data(model_folder, schema, extended_schema_dir, extended_data_dir):
+    model_data_list = create_and_combine_json_from_a_folder(model_folder)
+    extended_data_list = create_and_combine_json_from_a_folder(extended_data_dir)
+    all_data = model_data_list + extended_data_list
+    validator_object = AKMDataValidator(schema=schema)
+    validated_model_data = validator_object.validate_data_instances(all_data, extended_schema_dir=extended_schema_dir)
+    validated_model_data = validator_object.validate_contexts(all_data=validated_model_data)  ## passing valid instances
+    validator_object.log_errors()
+
+    return validated_model_data
+
+
+def export_to_json(validated_model_data, file_name):
+    with open(file_name, "w") as fw:
+        fw.write(json.dumps(validated_model_data, indent=4))
+
+
+def export_to_yaml(validated_model_data, file_name):
+    with open(file_name, "w") as fw:
+        fw.write(YamlCustomDumper.dumps(validated_model_data))
+
+
+# def export_to_graphql(validated_model_data, file_name):
+#     print("to be implemented")
+
+
+def main():
+    # Mapping of format choices to their corresponding functions
+    export_functions = {
+        "json": export_to_json,
+        "yaml": export_to_yaml,
+        # "graphql": export_to_graphql,
+    }
+
+    parser = argparse.ArgumentParser(description="Parse, validate, and optionally export AKM data.")
+    ## optional
+    parser.add_argument(
+        "-d",
+        "--model_data_folder",
+        type=str,
+        default="akm/data",
+        help="AKM model data folder",
+    )
+    parser.add_argument(
+        "-s",
+        "--schema",
+        type=str,
+        default="akm/schema/automotive_knowledge_model.json",
+        help="AKM schema file",
+    )
+    parser.add_argument(
+        "-xs",
+        "--extended_schema_dir",
+        type=str,
+        default="extensions/schema",
+        help="Directory for extended schema files",
+    )
+    parser.add_argument(
+        "-xd",
+        "--extended_data_dir",
+        type=str,
+        default="extensions/data",
+        help="Directory for extended data",
+    )
+    ## export options
+    parser.add_argument(
+        "-e",
+        "--export_format",
+        type=str,
+        choices=export_functions.keys(),
+        help="Specifies the export format",
+    )
+    parser.add_argument("-f", "--export_file_path", type=str, help="Path for export file")
+    args = parser.parse_args()
+
+    if args.export_format and not args.export_file_path:
+        parser.error("--export_format requires --export_file_path to be specified.")
+
+    with open(args.schema, "r") as f:
+        schema = json.load(f)
+    validated_model_data = parse_and_validate_data(
+        args.model_data_folder, schema, args.extended_schema_dir, args.extended_data_dir
+    )
+
+    if args.export_format:
+        export_functions[args.export_format](validated_model_data, args.export_file_path)
+    else:
+        print("Export not requested. Validation complete.")
+
+
+if __name__ == "__main__":
+    main()
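Since `main()` only wires these functions to argparse, the same pipeline can also be driven programmatically. A minimal sketch, assuming the default paths from the CLI options above:

```python
# Hypothetical programmatic use of the akm_parser pipeline; the paths are
# the CLI defaults and should be adjusted to the actual checkout.
import json
from akm_tools.akm_parser import parse_and_validate_data, export_to_yaml

with open("akm/schema/automotive_knowledge_model.json", "r") as f:
    schema = json.load(f)

validated = parse_and_validate_data(
    "akm/data",           # model data folder (CLI: -d)
    schema,               # parsed schema dict (CLI: -s, loaded above)
    "extensions/schema",  # extended schema directory (CLI: -xs)
    "extensions/data",    # extended data directory (CLI: -xd)
)
export_to_yaml(validated, "export.yaml")
```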
diff --git a/akm_tools/utils/__init__.py b/akm_tools/utils/__init__.py
new file mode 100644
index 0000000..a82def8
--- /dev/null
+++ b/akm_tools/utils/__init__.py
@@ -0,0 +1 @@
+from .genutils import YamlCustomDumper
diff --git a/akm_tools/utils/genutils.py b/akm_tools/utils/genutils.py
new file mode 100644
index 0000000..165541e
--- /dev/null
+++ b/akm_tools/utils/genutils.py
@@ -0,0 +1,35 @@
+import yaml
+
+
+class NoAliasDumper(yaml.SafeDumper):
+    def ignore_aliases(self, data):
+        return True
+
+    def write_line_break(self, data=None):
+        super().write_line_break(data)
+        if len(self.indents) == 1:
+            super().write_line_break()
+
+
+class YamlCustomDumper:
+    @staticmethod
+    def dumps(dictToDump):
+        yamlString = yaml.dump(
+            dictToDump,
+            default_flow_style=False,
+            sort_keys=False,
+            width=120,
+            Dumper=NoAliasDumper,
+        )
+        return yamlString
+
+    @staticmethod
+    def dump(dictToDump, fileHandle):
+        yaml.dump(
+            dictToDump,
+            fileHandle,
+            default_flow_style=False,
+            sort_keys=False,
+            width=120,
+            Dumper=NoAliasDumper,
+        )
diff --git a/akm_tools/validation/__init__.py b/akm_tools/validation/__init__.py
new file mode 100644
index 0000000..382e6d3
--- /dev/null
+++ b/akm_tools/validation/__init__.py
@@ -0,0 +1 @@
+from .validator import AKMDataValidator
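To illustrate what `NoAliasDumper` changes: objects referenced more than once are inlined rather than emitted as YAML anchors/aliases, and the overridden `write_line_break` separates top-level entries with a blank line. A small sketch:

```python
from akm_tools.utils import YamlCustomDumper

shared = {"unit": "km/h"}
data = {"speed": shared, "limit": shared}

# A plain SafeDumper would emit an anchor (&id001) for the first occurrence
# of `shared` and an alias (*id001) for the second; NoAliasDumper inlines
# both, so the output reads roughly:
#
#   speed:
#     unit: km/h
#
#   limit:
#     unit: km/h
print(YamlCustomDumper.dumps(data))
```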
\n{instances}\n" + super().__init__(err_msg) + self.message = err_msg + + +class BaseInstanceOverwiteException(Exception): + def __init__(self, base_instance, extended_instance): + err_msg = ( + f"The extended instace :\n{extended_instance}\nis overwriting properties of base instance\n{base_instance}\n" + ) + super().__init__(err_msg) + self.message = err_msg + + +class InvalidReferentIDException(Exception): + def __init__(self, instance, referentID): + err_msg = f"The instance :\n{instance}\nis referencing an invalid id : '{referentID}'\n" + super().__init__(err_msg) + self.message = err_msg diff --git a/akm_tools/validation/data_context_validators.py b/akm_tools/validation/data_context_validators.py new file mode 100644 index 0000000..fac0b5b --- /dev/null +++ b/akm_tools/validation/data_context_validators.py @@ -0,0 +1,145 @@ +from abc import ABC, abstractmethod +from typing import Dict, List, Any +from .global_debug_config import GlobalDebugConfig +from .custom_exceptions import IDConflictException, BaseInstanceOverwiteException, InvalidReferentIDException + + +class AllDataContextValidators(ABC): + """ + Base Complete Data References/Context Validator Interface + These type of validators work on the context of all the data together, instead of just one instance + """ + + error_messages = [] + + @abstractmethod + def validate_data_contexts(self, all_data: List[Dict[str, Any]]): + pass + + @classmethod + def create_instance_dict(self, all_data): + # Populate the instance_dict dictionary + instance_dict = {} + for instance in all_data: + if "id" in instance: + instance_id = instance["id"] + if instance_id not in instance_dict: + # Initialize the ID key with a list containing the current instance + instance_dict[instance_id] = {"count": 1, "instances": [instance]} + else: + # Append the current instance to the list and increment the count + instance_dict[instance_id]["instances"].append(instance) + instance_dict[instance_id]["count"] += 1 + return instance_dict + + def _handle_error(self, exception_type, *args): + error_exception = exception_type(*args) + if GlobalDebugConfig.debug_mode: + raise error_exception + else: + self.error_messages.append(error_exception.message) + + +class ExtendedInstanceContentValidator(AllDataContextValidators): + """ + For Instances with duplicate "id", where one extends the other, + check if the extended Instance does not overwrite content of base instance + """ + + def __init__(self): + self.warning_messages = [] + + def validate_data_contexts(self, all_data: List[Dict[str, Any]]): + valid_data = [] + instance_dict = self.__class__.create_instance_dict(all_data) + + # Handle instances with same ids and prepare valid_data + for instance_id, instance_content in instance_dict.items(): + if len(instance_content) > 2: + self._handle_multiple_id_conflicts(instance_content) + if instance_content["count"] == 2: + # check if the insances are not overriding , but only extending existing data. + is_valid_extension, base_instance, extended_instance = self.__class__.check_data_is_extended_not_overwritten( + instance_content["instances"] + ) + if is_valid_extension: + valid_data.append(extended_instance) + self.warning_messages.append( + f"Base instance will be ignored. 
diff --git a/akm_tools/validation/data_context_validators.py b/akm_tools/validation/data_context_validators.py
new file mode 100644
index 0000000..fac0b5b
--- /dev/null
+++ b/akm_tools/validation/data_context_validators.py
@@ -0,0 +1,148 @@
+from abc import ABC, abstractmethod
+from typing import Dict, List, Any
+from .global_debug_config import GlobalDebugConfig
+from .custom_exceptions import IDConflictException, BaseInstanceOverwiteException, InvalidReferentIDException
+
+
+class AllDataContextValidators(ABC):
+    """
+    Base Complete Data References/Context Validator Interface
+    These types of validators work on the context of all the data together, instead of just one instance
+    """
+
+    error_messages = []
+
+    @abstractmethod
+    def validate_data_contexts(self, all_data: List[Dict[str, Any]]):
+        pass
+
+    @classmethod
+    def create_instance_dict(cls, all_data):
+        # Populate the instance_dict dictionary
+        instance_dict = {}
+        for instance in all_data:
+            if "id" in instance:
+                instance_id = instance["id"]
+                if instance_id not in instance_dict:
+                    # Initialize the ID key with a list containing the current instance
+                    instance_dict[instance_id] = {"count": 1, "instances": [instance]}
+                else:
+                    # Append the current instance to the list and increment the count
+                    instance_dict[instance_id]["instances"].append(instance)
+                    instance_dict[instance_id]["count"] += 1
+        return instance_dict
+
+    def _handle_error(self, exception_type, *args):
+        error_exception = exception_type(*args)
+        if GlobalDebugConfig.debug_mode:
+            raise error_exception
+        else:
+            self.error_messages.append(error_exception.message)
+
+
+class ExtendedInstanceContentValidator(AllDataContextValidators):
+    """
+    For Instances with duplicate "id", where one extends the other,
+    check that the extended Instance does not overwrite content of the base instance
+    """
+
+    def __init__(self):
+        self.error_messages = []
+        self.warning_messages = []
+
+    def validate_data_contexts(self, all_data: List[Dict[str, Any]]):
+        valid_data = []
+        instance_dict = self.__class__.create_instance_dict(all_data)
+
+        # Handle instances with same ids and prepare valid_data
+        for instance_id, instance_content in instance_dict.items():
+            if instance_content["count"] > 2:
+                self._handle_multiple_id_conflicts(instance_content["instances"])
+            if instance_content["count"] == 2:
+                # check that the instances are only extending, not overwriting, existing data.
+                is_valid_extension, base_instance, extended_instance = self.__class__.check_data_is_extended_not_overwritten(
+                    instance_content["instances"]
+                )
+                if is_valid_extension:
+                    valid_data.append(extended_instance)
+                    self.warning_messages.append(
+                        f"Base instance will be ignored:\n{base_instance}\nwas extended by\n{extended_instance}"
+                    )
+                else:
+                    valid_data.append(base_instance)
+                    self._handle_extension_overwrite(base_instance, extended_instance)
+            else:
+                valid_data.append(instance_content["instances"][0])  ## there should be only one entry
+        return valid_data
+
+    @classmethod
+    def check_data_is_extended_not_overwritten(cls, instances: List[Dict]):
+        # Determine which instance is the base and which is the extension
+        instance1, instance2 = instances[0], instances[1]
+        base_instance, extended_instance = (
+            (instance1, instance2) if len(instance1) <= len(instance2) else (instance2, instance1)
+        )
+        # Check every property in the base instance to see if it exists in the extended instance
+        # with the same value
+        for key, value in base_instance.items():
+            if key not in extended_instance or extended_instance[key] != value:
+                return False, base_instance, extended_instance
+        return True, base_instance, extended_instance
+
+    def _handle_multiple_id_conflicts(self, instances: List[Dict]):
+        self._handle_error(IDConflictException, instances)
+
+    def _handle_extension_overwrite(self, base_instance, extended_instance):
+        self._handle_error(BaseInstanceOverwiteException, base_instance, extended_instance)
+
+
+class CrossReferenceValidator(AllDataContextValidators):
+    def __init__(self):
+        self.error_messages = []
+        self.id_set = set()
+
+    def validate_data_contexts(self, all_data):
+        # Create a dictionary mapping IDs to data instances
+        id_to_instance = {instance["id"]: instance for instance in all_data if "id" in instance}
+
+        # Create a dictionary mapping IDs to their validity
+        id_to_validity = {id: None for id in id_to_instance}
+
+        def is_valid(id):
+            # If the ID is not in the dictionary, it's invalid
+            if id not in id_to_instance:
+                return False
+
+            # If the validity has already been determined, return it
+            if id_to_validity[id] is not None:
+                return id_to_validity[id]
+
+            # Mark the ID as being checked to handle circular references
+            id_to_validity[id] = False
+
+            instance = id_to_instance[id]
+            for key, value in instance.items():
+                if (
+                    isinstance(value, dict)
+                    and "referentEntityTypeID" in value  ## this is a hard dependency on the schema for akm.Reference
+                    and "referentID" in value
+                ):
+                    if not is_valid(value["referentID"]):
+                        self._handle_invalid_referent(instance, value["referentID"])
+                        return False
+
+            # If all references are valid, the instance is valid
+            id_to_validity[id] = True
+            return True
+
+        # Validate the references
+        for id in id_to_instance:
+            is_valid(id)
+
+        # Collect the valid data
+        valid_data = [instance for id, instance in id_to_instance.items() if id_to_validity[id]]
+
+        return valid_data
+
+    def _handle_invalid_referent(self, instance, referentID_value):
+        self._handle_error(InvalidReferentIDException, instance, referentID_value)
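A quick illustration of the extension rule enforced above: an extended instance may only add keys, never change the value of a key it shares with the base. The instances below are invented for the example:

```python
from akm_tools.validation.data_context_validators import ExtendedInstanceContentValidator

base = {"id": "akm.example", "name": "Example"}
adds_a_field = {"id": "akm.example", "name": "Example", "description": "extra"}
changes_a_field = {"id": "akm.example", "name": "Renamed"}

validator = ExtendedInstanceContentValidator()

# Valid extension: the shorter instance is taken as the base, and the longer
# one repeats all of its key/value pairs, so only the extension survives.
print(validator.validate_data_contexts([base, adds_a_field]))  # [adds_a_field]

# Overwrite: "name" differs, so the base is kept and an error message is
# collected (or BaseInstanceOverwiteException is raised in debug mode).
print(validator.validate_data_contexts([base, changes_a_field]))  # [base]
print(validator.error_messages)
```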
diff --git a/akm_tools/validation/data_instace_validators.py b/akm_tools/validation/data_instace_validators.py
new file mode 100644
index 0000000..d729738
--- /dev/null
+++ b/akm_tools/validation/data_instace_validators.py
@@ -0,0 +1,88 @@
+from abc import ABC, abstractmethod
+from typing import Dict, Any, Optional
+from referencing import Registry
+from referencing.jsonschema import DRAFT202012
+from pathlib import Path
+import json
+from jsonschema.exceptions import ValidationError
+from jsonschema import Draft202012Validator
+from .global_debug_config import GlobalDebugConfig
+
+
+class DataInstanceValidator(ABC):
+    """
+    Base Instance Validator Interface
+    Use this for adding more Instance level Validator Classes
+    """
+
+    @abstractmethod
+    def validate(self, instance: dict, **kwargs):
+        pass
+
+
+# Concrete Validator Implementations
+class CoreJsonSchemaValidator(DataInstanceValidator):
+    def __init__(self, schema: Dict, extended_schema_dir: Optional[str]):
+        self.schema = schema
+        self.registry = None
+        self.main_validator = None
+        self.object_validators_dict = None
+        self.extended_schema_dir = extended_schema_dir
+        self._configure_registry_and_validators()
+
+    def _configure_registry_and_validators(self):
+        self.registry = self._create_registry()
+        self._configure_validators()
+
+    def _create_registry(self) -> Registry:
+        """Configure and return a registry with all schemas."""
+        extension_schema_registry_entries = [(self.schema["$id"], DRAFT202012.create_resource(self.schema))]
+
+        if self.extended_schema_dir:
+            path_extended_schema_dir = Path(self.extended_schema_dir)
+            try:
+                extended_schemas_list = [json.load(x.open()) for x in path_extended_schema_dir.glob("*.json")]
+                object_schema_registry_entries = [(x["$id"], DRAFT202012.create_resource(x)) for x in extended_schemas_list]
+                extension_schema_registry_entries += object_schema_registry_entries
+            except (IOError, json.JSONDecodeError) as e:
+                print(f"Error processing extended schemas: {e}")
+
+        return Registry().with_resources(extension_schema_registry_entries)
+
+    def configure_registry(self, registry: Registry):
+        self.registry = registry
+        self._configure_validators()
+
+    def _configure_validators(self):
+        self.object_validators_dict = self._create_individual_object_validators()
+        self.main_validator = self._create_main_validator()
+
+    def _create_main_validator(self):
+        return Draft202012Validator(self.schema, registry=self.registry)
+
+    def _create_individual_object_validators(self) -> Dict[str, Any]:
+        validators = {}
+        if "$defs" in self.schema:
+            for key, schema_def in self.schema["$defs"].items():
+                validators[key] = Draft202012Validator(schema_def, registry=self.registry)
+        return validators
+
+    def validate(self, instance: dict, **kwargs):
+        try:
+            self.main_validator.validate(instance=instance)
+            return True, ""
+        except ValidationError as e:
+            if GlobalDebugConfig.debug_mode:
+                raise e
+            else:
+                ## main validator failed
+                base_error_msg = f"Validation Error for {e.message}\n"
+                if "entityTypeID" in instance.keys():
+                    if instance["entityTypeID"] in self.object_validators_dict.keys():
+                        additional_error_info = sorted(
+                            self.object_validators_dict[instance["entityTypeID"]].iter_errors(instance),
+                            key=lambda e: str(e.path),  # str() because deque paths are not orderable
+                        )
+                        base_error_msg += "\n".join(x.message for x in additional_error_info)
+                        base_error_msg += "\n"
+                return False, base_error_msg
diff --git a/akm_tools/validation/global_debug_config.py b/akm_tools/validation/global_debug_config.py
new file mode 100644
index 0000000..0b5e989
--- /dev/null
+++ b/akm_tools/validation/global_debug_config.py
@@ -0,0 +1,10 @@
+class GlobalDebugConfig:
+    debug_mode = False
+
+    @classmethod
+    def set_debug_mode(cls):
+        cls.debug_mode = True
+
+    @classmethod
+    def unset_debug_mode(cls):
+        cls.debug_mode = False
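The `DataInstanceValidator` ABC is the extension point its docstring refers to: any class with a matching `validate` can be passed to `AKMDataValidator` (defined next) via its `data_instance_validators` argument. A sketch of a custom validator; the rule it checks is made up for illustration:

```python
from akm_tools.validation.data_instace_validators import DataInstanceValidator


class RequiresIdValidator(DataInstanceValidator):
    """Hypothetical extra check: every instance must carry an 'id'."""

    def __init__(self, schema, extended_schema_dir=None):
        # Same constructor shape as CoreJsonSchemaValidator, so
        # AKMDataValidator can instantiate both the same way.
        self.schema = schema

    def validate(self, instance: dict, **kwargs):
        if "id" not in instance:
            return False, f"Instance has no 'id':\n{instance}\n"
        return True, ""
```

Note that `GlobalDebugConfig.set_debug_mode()` flips the built-in validators from collect-and-log to raise-on-first-error, which is handy under pytest.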
diff --git a/akm_tools/validation/validator.py b/akm_tools/validation/validator.py
new file mode 100644
index 0000000..57001b0
--- /dev/null
+++ b/akm_tools/validation/validator.py
@@ -0,0 +1,67 @@
+import logging
+from typing import Dict, List
+
+from .data_instace_validators import CoreJsonSchemaValidator
+from .data_context_validators import ExtendedInstanceContentValidator, CrossReferenceValidator
+
+# Set up logging
+logging.basicConfig(
+    filename="validation_errors.log",
+    level=logging.INFO,
+    format="%(message)s",
+    filemode="w",
+)
+
+
+# Validation Orchestrator Class
+class AKMDataValidator:
+    def __init__(
+        self,
+        schema,
+        data_instance_validators=[CoreJsonSchemaValidator],
+        complete_data_context_validators=[ExtendedInstanceContentValidator, CrossReferenceValidator],
+    ):
+        self.schema = schema
+        self.data_instance_validators_class_list = data_instance_validators
+        self.complete_data_validators_class_list = complete_data_context_validators
+        self.validation_errors = []
+        self.extended_schema_dir = None
+
+    def validate_data_instances(self, all_data: List[Dict], **kwargs):
+        print("Validating Data Instances")
+        if "extended_schema_dir" in kwargs:
+            self.extended_schema_dir = kwargs["extended_schema_dir"]
+        ## configure data_instance_validator_objects
+        data_instance_validator_objects = self._configure_data_instance_validators()
+        valid_data = []
+        for instance in all_data:
+            instance_is_valid = True
+            for instance_validator in data_instance_validator_objects:
+                valid, error_msg = instance_validator.validate(instance=instance)
+                if not valid:
+                    instance_is_valid = False
+                    self.validation_errors.append(error_msg)
+            if instance_is_valid:
+                valid_data.append(instance)
+        return valid_data
+
+    def _configure_data_instance_validators(self):
+        data_instance_validator_objects = [
+            obj(schema=self.schema, extended_schema_dir=self.extended_schema_dir)
+            for obj in self.data_instance_validators_class_list
+        ]
+        return data_instance_validator_objects
+
+    def log_errors(self):
+        for error_msg in self.validation_errors:
+            logging.error(error_msg)
+
+    def validate_contexts(self, all_data: List[Dict]):
+        print("Validating Data Contexts")
+        # Run each context validator in sequence over the data that survived the previous one
+        valid_data = all_data
+        for context_validator in self.complete_data_validators_class_list:
+            context_validator_object = context_validator()
+            valid_data = context_validator_object.validate_data_contexts(valid_data)
+            self.validation_errors = self.validation_errors + context_validator_object.error_messages
+        return valid_data
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..4fd53bd
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,446 @@
+# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand.
+
+[[package]]
+name = "attrs"
+version = "23.2.0"
+description = "Classes Without Boilerplate"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
+    {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
+]
+
+[package.extras]
+cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
+dev = ["attrs[tests]", "pre-commit"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
+tests = ["attrs[tests-no-zope]", "zope-interface"]
+tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
+tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
+
+[[package]]
+name = "black"
+version = "24.2.0"
+description = "The uncompromising code formatter."
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6981eae48b3b33399c8757036c7f5d48a535b962a7c2310d19361edeef64ce29"}, + {file = "black-24.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d533d5e3259720fdbc1b37444491b024003e012c5173f7d06825a77508085430"}, + {file = "black-24.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61a0391772490ddfb8a693c067df1ef5227257e72b0e4108482b8d41b5aee13f"}, + {file = "black-24.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:992e451b04667116680cb88f63449267c13e1ad134f30087dec8527242e9862a"}, + {file = "black-24.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:163baf4ef40e6897a2a9b83890e59141cc8c2a98f2dda5080dc15c00ee1e62cd"}, + {file = "black-24.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e37c99f89929af50ffaf912454b3e3b47fd64109659026b678c091a4cd450fb2"}, + {file = "black-24.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9de21bafcba9683853f6c96c2d515e364aee631b178eaa5145fc1c61a3cc92"}, + {file = "black-24.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:9db528bccb9e8e20c08e716b3b09c6bdd64da0dd129b11e160bf082d4642ac23"}, + {file = "black-24.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d84f29eb3ee44859052073b7636533ec995bd0f64e2fb43aeceefc70090e752b"}, + {file = "black-24.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e08fb9a15c914b81dd734ddd7fb10513016e5ce7e6704bdd5e1251ceee51ac9"}, + {file = "black-24.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810d445ae6069ce64030c78ff6127cd9cd178a9ac3361435708b907d8a04c693"}, + {file = "black-24.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ba15742a13de85e9b8f3239c8f807723991fbfae24bad92d34a2b12e81904982"}, + {file = "black-24.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e53a8c630f71db01b28cd9602a1ada68c937cbf2c333e6ed041390d6968faf4"}, + {file = "black-24.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93601c2deb321b4bad8f95df408e3fb3943d85012dddb6121336b8e24a0d1218"}, + {file = "black-24.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0057f800de6acc4407fe75bb147b0c2b5cbb7c3ed110d3e5999cd01184d53b0"}, + {file = "black-24.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:faf2ee02e6612577ba0181f4347bcbcf591eb122f7841ae5ba233d12c39dcb4d"}, + {file = "black-24.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:057c3dc602eaa6fdc451069bd027a1b2635028b575a6c3acfd63193ced20d9c8"}, + {file = "black-24.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08654d0797e65f2423f850fc8e16a0ce50925f9337fb4a4a176a7aa4026e63f8"}, + {file = "black-24.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca610d29415ee1a30a3f30fab7a8f4144e9d34c89a235d81292a1edb2b55f540"}, + {file = "black-24.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:4dd76e9468d5536abd40ffbc7a247f83b2324f0c050556d9c371c2b9a9a95e31"}, + {file = "black-24.2.0-py3-none-any.whl", hash = "sha256:e8a6ae970537e67830776488bca52000eaa37fa63b9988e8c487458d9cd5ace6"}, + {file = "black-24.2.0.tar.gz", hash = "sha256:bce4f25c27c3435e4dace4815bcb2008b87e167e3bf4ee47ccdc5ce906eb4894"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] 
+colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jsonschema" +version = "4.21.1" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, + {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.12.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, +] + +[package.dependencies] +referencing = ">=0.31.0" + +[[package]] +name = "mypy-extensions" +version = 
"1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pytest" +version = "8.0.1" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.0.1-py3-none-any.whl", hash = "sha256:3e4f16fe1c0a9dc9d9389161c127c3edc5d810c38d6793042fb81d9f48a59fca"}, + {file = "pytest-8.0.1.tar.gz", hash = "sha256:267f6563751877d772019b13aacbe4e860d73fe8f651f28112e9ac37de7513ae"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.3.0,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" 
+files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "referencing" +version = "0.33.0" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.33.0-py3-none-any.whl", hash = "sha256:39240f2ecc770258f28b642dd47fd74bc8b02484de54e1882b74b35ebd779bd5"}, + {file = "referencing-0.33.0.tar.gz", hash = "sha256:c775fedf74bc0f9189c2a3be1c12fd03e8c23f4d371dce795df44e06c5b412f7"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] +name = "rpds-py" +version = "0.18.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, + {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, + {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, + {file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, + {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, + 
{file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, + {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, + {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, + {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, + 
{file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, + {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, + {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, + {file = 
"rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, + {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, 
+]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.10"
+content-hash = "8bcacca50ab544267b3cf0ffca0e66771127c0ea1afa971fdbe8c9b20d01417c"
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..4756c2e
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,19 @@
+[tool.poetry]
+name = "akm_tools"
+version = "0.1.0"
+description = ""
+authors = ["Mohit Yadav "]
+
+[tool.poetry.dependencies]
+python = "^3.10"
+PyYAML = "*"
+jsonschema = "*"
+black = "^24.2.0"
+
+
+[tool.poetry.group.dev.dependencies]
+pytest = "^8.0.1"
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..d9ecffe
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,251 @@
+import pytest
+from referencing.jsonschema import DRAFT202012
+from referencing import Registry, Resource
+
+## Assumptions/Requirements: every schema type has ['id', 'type'] present and required for objects
+
+
+@pytest.fixture
+def simple_schema():
+    schema = {
+        "$schema": "https://json-schema.org/draft/2020-12/schema",
+        "$id": "https://somehost.com/test.schema.json",
+        "type": "object",
+        "properties": {
+            "type": {"type": "string"},
+            "age": {"type": "number"},
+            "id": {"type": "string"},
+        },
+        "required": ["id"],
+        "additionalProperties": False,
+    }
+    return schema
+
+
+@pytest.fixture
+def simple_data():
+    data = [
+        {"type": "John", "age": 30, "id": "unique_id_1"},
+        {"type": "Jane", "age": 25, "id": "unique_id_2"},
+    ]
+    return data
+
+
+@pytest.fixture
+def simple_data_with_more_attributes():
+    data = [
+        {"type": "John", "age": 30, "id": "unique_id_1", "extra_attribute": "wild"},
+        {"type": "Jane", "age": 25, "id": "unique_id_2", "extra_attribute": "grass"},
+    ]
+    return data
+
+
+@pytest.fixture
+def simple_data_without_required_attribute():
+    data = [{"type": "John", "age": 30}, {"type": "Jane", "age": 25}]
+    return data
+
+
+@pytest.fixture
+def complex_schema_with_defs():
+    schema = {
+        "$schema": "https://json-schema.org/draft/2020-12/schema",
+        "$id": "complexSchema",
+        "oneOf": [
+            {"$ref": "complexSchema.ObjectType1"},
+            {"$ref": "complexSchema.ObjectType2"},
+        ],
+        "$defs": {
+            "BaseClass": {
+                "$id": "complexSchema.BaseClass",
+                "type": "object",
+                "properties": {
+                    "id": {"type": "string"},
+                    "definition": {"type": "string"},
+                },
+                "required": ["id"],
+            },
+            "ObjectType1": {
+                "$id": "complexSchema.ObjectType1",
+                "type": "object",
+                "allOf": [{"$ref": "complexSchema.BaseClass"}],
+                "properties": {
+                    "name": {"type": "string"},
+                    "description": {"type": "string"},
+                    "type": {"type": "string", "const": "ObjectType1"},
+                },
+                "required": ["name", "type"],
+                "unevaluatedProperties": False,
+            },
+            "ObjectType2": {
+                "$id": "complexSchema.ObjectType2",
+                "type": "object",
+                "allOf": [{"$ref": "complexSchema.BaseClass"}],
+                "properties": {
+                    "age": {"type": "number"},
+                    "type": {"type": "string", "const": "ObjectType2"},
+                },
+                "required": ["type"],
+                "unevaluatedProperties": False,
+            },
+        },
+    }
+    schema_resources = [("complex_schema", DRAFT202012.create_resource(schema))]
+    registry = Registry().with_resources(schema_resources)
+    return schema, registry
+
+
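+# The next few data fixtures are shaped for complexSchema above: ObjectType1
+# requires "name" and "type", ObjectType2 requires "type", and both inherit
+# the required "id" (plus optional "definition") from BaseClass via allOf.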
"ObjectType2", "age": 10}, + ] + return data + + +@pytest.fixture +def complex_data_missing_required_attributes(): ## id/type is missing. + data = [ + { + "definition": "Some def1", + "name": "AttributeName", + "type": "ObjectType1", + "description": "some desc", + }, + { + "type": "ObjectType2", + "age": 10, + }, + ] + return data + + +@pytest.fixture +def complex_data_with_additional_attributes(): + data = [ + { + "id": "unique_id_1", + "definition": "Some def1", + "name": "AttributeName", + "type": "ObjectType1", + "description": "some desc", + "extra_attribute": "wild", + }, + { + "id": "unique_id_2", + "type": "ObjectType2", + "age": 10, + "extra_attribute": "grass", + }, + ] + return data + + +@pytest.fixture +def data_with_duplicate_ids(): + data = [ + { + "id": "unique_id_1", + "definition": "Some def1", + "name": "AttributeName", + "type": "ObjectType1", + "description": "some desc", + }, + { + "id": "unique_id_1", + "definition": "Some def2", + "name": "AttributeName2", + "type": "ObjectType2", + "description": "some desc2", + }, + ] + return data + + +@pytest.fixture +def scehma_with_extensions(): + schema = { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "schema_with_extensions", + "type": "object", + "allOf": [ + {"$ref": "extension.additional_properties"}, + ], + "properties": { + "id": {"type": "string"}, + "description": {"type": "string"}, + "entityType": {"type": "string", "const": "ObjectType3"}, + }, + "required": ["entityType"], + "unevaluatedProperties": False, + } + schema_extension = { + "$id": "extension.additional_properties", + "type": "object", + "properties": {"extended_property": {"type": "string"}}, + } + schema_resources = [ + ("complex_scehma", DRAFT202012.create_resource(schema)), + ("schema_extension", DRAFT202012.create_resource(schema_extension)), + ] + registry = Registry().with_resources(schema_resources) + return schema, registry + + +@pytest.fixture +def data_with_extended_properties(): + data = [ + { + "id": "unique_id1", + "entityType": "ObjectType3", + "extended_property": "any string", + } + ] + return data + + +@pytest.fixture +def overlay_existing_data_with_addional_properties(): + data = [ + { + "id": "unique_id1", + "entityType": "ObjectType3", + }, + { + "id": "unique_id1", + "entityType": "ObjectType3", + "extended_property": "any string", + }, + ] + return data + + +@pytest.fixture +def ovewrite_existing_data(): + data = [ + { + "id": "unique_id1", + "description": "description for unique_id1", + "entityType": "ObjectType3", + }, + { + "id": "unique_id1", + "entityType": "CHANGED", + "description": "description CHANGED", + "extended_property": "any string", + }, + ] + return data diff --git a/tests/test_AllDataContextValidators.py b/tests/test_AllDataContextValidators.py new file mode 100644 index 0000000..2b8e91a --- /dev/null +++ b/tests/test_AllDataContextValidators.py @@ -0,0 +1,19 @@ +import pytest +from akm_tools.validation.data_context_validators import AllDataContextValidators + + +def test_create_instance_dict(): + """ + Test case for the create_instance_dict method of the AllDataContextValidators class. + + This test checks if the create_instance_dict method correctly creates a dictionary + that maps instance id's to a dictionary containing the count of instances with that ID + and a list of the instances themselves. 
+ """ + all_data = [{"id": "1a", "name": "test1"}, {"id": "2b", "name": "test2"}, {"id": "1a", "name": "test3"}] + instance_dict = AllDataContextValidators.create_instance_dict(all_data) + expected_dict = { + "1a": {"count": 2, "instances": [{"id": "1a", "name": "test1"}, {"id": "1a", "name": "test3"}]}, + "2b": {"count": 1, "instances": [{"id": "2b", "name": "test2"}]}, + } + assert instance_dict == expected_dict, "The instance dictionary was not created correctly." diff --git a/tests/test_CoreJsonSchemaValidator.py b/tests/test_CoreJsonSchemaValidator.py new file mode 100644 index 0000000..aefae2e --- /dev/null +++ b/tests/test_CoreJsonSchemaValidator.py @@ -0,0 +1,90 @@ +import pytest +from akm_tools.validation.data_instace_validators import CoreJsonSchemaValidator + + +# Testing CoreJsonSchemaValidator with simple data +def test_simple_data_validator_with_valid_data(simple_schema, simple_data): + simple_data_validator = CoreJsonSchemaValidator(schema=simple_schema, extended_schema_dir=None) + valid_data = [] + for instance in simple_data: + is_valid, _ = simple_data_validator.validate(instance=instance) + valid_data.append(is_valid) + assert all(valid_data) == True + + +def test_simple_data_validator_with_invalid_data(simple_schema, simple_data_with_more_attributes): + simple_data_validator = CoreJsonSchemaValidator(schema=simple_schema, extended_schema_dir=None) + valid_data = [] + for instance in simple_data_with_more_attributes: + is_valid, _ = simple_data_validator.validate(instance=instance) + valid_data.append(is_valid) + assert all(valid_data) == False + + +def test_simple_data_validator_with_data_missing_attributes(simple_schema, simple_data_without_required_attribute): + simple_data_validator = CoreJsonSchemaValidator(schema=simple_schema, extended_schema_dir=None) + valid_data = [] + for instance in simple_data_without_required_attribute: + is_valid, _ = simple_data_validator.validate(instance=instance) + valid_data.append(is_valid) + assert all(valid_data) == False + + +def test_complex_data_validator_with_jsonschema_references(complex_schema_with_defs, complex_data): + """ + check if using references with $id works (a jsonschema feature) + """ + schema, registry = complex_schema_with_defs + complex_data_validator = CoreJsonSchemaValidator(schema=schema, extended_schema_dir=None) + complex_data_validator.registr = registry + valid_data = [] + for instance in complex_data: + is_valid, _ = complex_data_validator.validate(instance=instance) + valid_data.append(is_valid) + assert all(valid_data) == True + + +def test_complex_data_validator_with_missing_required_attributes( + complex_schema_with_defs, complex_data_missing_required_attributes +): + schema, registry = complex_schema_with_defs + complex_data_validator = CoreJsonSchemaValidator(schema=schema, extended_schema_dir=None) + complex_data_validator.configure_registry(registry) + valid_data = [] + for instance in complex_data_missing_required_attributes: + is_valid, _ = complex_data_validator.validate(instance=instance) + valid_data.append(is_valid) + assert all(valid_data) == False + + +def test_complex_data_validator_with_invalid_attribute(complex_schema_with_defs, complex_data_with_additional_attributes): + schema, registry = complex_schema_with_defs + complex_data_validator = CoreJsonSchemaValidator(schema=schema, extended_schema_dir=None) + complex_data_validator.configure_registry(registry) + valid_data = [] + for instance in complex_data_with_additional_attributes: + is_valid, _ = 
+def test_complex_data_validator_with_jsonschema_references(complex_schema_with_defs, complex_data):
+    """
+    check if using references with $id works (a jsonschema feature)
+    """
+    schema, registry = complex_schema_with_defs
+    complex_data_validator = CoreJsonSchemaValidator(schema=schema, extended_schema_dir=None)
+    complex_data_validator.configure_registry(registry)
+    valid_data = []
+    for instance in complex_data:
+        is_valid, _ = complex_data_validator.validate(instance=instance)
+        valid_data.append(is_valid)
+    assert all(valid_data)
+
+
+def test_complex_data_validator_with_missing_required_attributes(
+    complex_schema_with_defs, complex_data_missing_required_attributes
+):
+    schema, registry = complex_schema_with_defs
+    complex_data_validator = CoreJsonSchemaValidator(schema=schema, extended_schema_dir=None)
+    complex_data_validator.configure_registry(registry)
+    valid_data = []
+    for instance in complex_data_missing_required_attributes:
+        is_valid, _ = complex_data_validator.validate(instance=instance)
+        valid_data.append(is_valid)
+    assert not all(valid_data)
+
+
+def test_complex_data_validator_with_invalid_attribute(complex_schema_with_defs, complex_data_with_additional_attributes):
+    schema, registry = complex_schema_with_defs
+    complex_data_validator = CoreJsonSchemaValidator(schema=schema, extended_schema_dir=None)
+    complex_data_validator.configure_registry(registry)
+    valid_data = []
+    for instance in complex_data_with_additional_attributes:
+        is_valid, _ = complex_data_validator.validate(instance=instance)
+        valid_data.append(is_valid)
+    assert not all(valid_data)
+
+
+def test_complex_data_validator_with_extended_data(schema_with_extensions, data_with_extended_properties):
+    schema, registry = schema_with_extensions
+    complex_data_validator = CoreJsonSchemaValidator(schema=schema, extended_schema_dir=None)
+    complex_data_validator.configure_registry(registry)
+    valid_data = []
+    for instance in data_with_extended_properties:
+        is_valid, _ = complex_data_validator.validate(instance=instance)
+        valid_data.append(is_valid)
+    assert all(valid_data)
+
+
+def test_complex_data_validator_with_overlayed_extended_data(schema_with_extensions, overlay_existing_data_with_additional_properties):
+    schema, registry = schema_with_extensions
+    complex_data_validator = CoreJsonSchemaValidator(schema=schema, extended_schema_dir=None)
+    complex_data_validator.configure_registry(registry)
+    valid_data = []
+    for instance in overlay_existing_data_with_additional_properties:
+        is_valid, _ = complex_data_validator.validate(instance=instance)
+        valid_data.append(is_valid)
+    assert all(valid_data)
diff --git a/tests/test_CrossReferenceValidator.py b/tests/test_CrossReferenceValidator.py
new file mode 100644
index 0000000..3abd88a
--- /dev/null
+++ b/tests/test_CrossReferenceValidator.py
@@ -0,0 +1,74 @@
+import pytest
+from akm_tools.validation.data_context_validators import CrossReferenceValidator
+
+
+@pytest.fixture
+def invalid_chain_of_references():
+    return [
+        {"id": "Component1", "entityTypeID": "Class1", "isA": {"referentEntityTypeID": "Class1", "referentID": "Object1"}},
+        {
+            "id": "Object1",
+            "definition": "A component of a vehicle",
+            "entityTypeID": "Class1",
+            "isA": {"referentEntityTypeID": "Class1", "referentID": "Not_defined_Component"},
+        },
+        {
+            "id": "Component2",
+            "entityTypeID": "Class1",
+            "isA": {"referentEntityTypeID": "Class1", "referentID": "Component1"},
+        },
+    ]
+
+
+@pytest.fixture
+def valid_chain_of_reference():
+    return [
+        {"id": "Component1", "entityTypeID": "Class1", "isA": {"referentEntityTypeID": "Class1", "referentID": "Object1"}},
+        {
+            "id": "Object1",
+            "entityTypeID": "Class1",
+        },
+        {"id": "Component2", "entityTypeID": "Class1", "isA": {"referentEntityTypeID": "Class1", "referentID": "Object1"}},
+    ]
+
+
+@pytest.fixture
+def reference_not_present():
+    return [
+        {"id": "Component1", "entityTypeID": "Class1", "isA": {"referentEntityTypeID": "Class1", "referentID": "Object1"}},
+        {"id": "Component2", "entityTypeID": "Class1", "isA": {"referentEntityTypeID": "Class1", "referentID": "Object1"}},
+    ]
+
+
+@pytest.fixture
+def circular_references():
+    return [
+        {"id": "Object1", "entityTypeID": "Class1", "isA": {"referentEntityTypeID": "Class1", "referentID": "Component1"}},
+        {"id": "Component1", "entityTypeID": "Class1", "isA": {"referentEntityTypeID": "Class1", "referentID": "Object1"}},
+    ]
+
+
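+# CrossReferenceValidator resolves every "isA" referentID against the ids defined
+# in the data set; instances whose reference chain does not terminate in a defined,
+# valid instance (including circular chains) are dropped from the returned data.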
+def test_invalid_chain_of_references(invalid_chain_of_references):
+    validator = CrossReferenceValidator()
+    valid_data = validator.validate_data_contexts(invalid_chain_of_references)
+    assert len(valid_data) == 0, "No instance should survive an invalid reference chain."
+
+
+def test_valid_chain_of_reference(valid_chain_of_reference):
+    validator = CrossReferenceValidator()
+    valid_data = validator.validate_data_contexts(valid_chain_of_reference)
+    assert len(valid_data) == 3, "All three instances have valid cross-references and should be kept."
+
+
+def test_reference_not_present(reference_not_present):
+    validator = CrossReferenceValidator()
+    valid_data = validator.validate_data_contexts(reference_not_present)
+    assert len(valid_data) == 0, "Instances referencing an undefined id should be rejected."
+
+
+def test_circular_references(circular_references):
+    validator = CrossReferenceValidator()
+    assert validator.validate_data_contexts(circular_references) == []
diff --git a/tests/test_ExtendedInstanceContentValidator.py b/tests/test_ExtendedInstanceContentValidator.py
new file mode 100644
index 0000000..e516f11
--- /dev/null
+++ b/tests/test_ExtendedInstanceContentValidator.py
@@ -0,0 +1,42 @@
+import pytest
+from akm_tools.validation.data_context_validators import ExtendedInstanceContentValidator
+from akm_tools.validation.custom_exceptions import BaseInstanceOverwiteException, IDConflictException
+from akm_tools.validation.global_debug_config import GlobalDebugConfig
+
+
+def test_extended_data_is_valid(overlay_existing_data_with_additional_properties):
+    assert ExtendedInstanceContentValidator.check_data_is_extended_not_overwritten(
+        overlay_existing_data_with_additional_properties
+    )
+
+
+def test_extended_data_is_used(overlay_existing_data_with_additional_properties):
+    validator = ExtendedInstanceContentValidator()
+    valid_data = validator.validate_data_contexts(overlay_existing_data_with_additional_properties)
+    assert valid_data[0] == {
+        "id": "unique_id1",
+        "entityType": "ObjectType3",
+        "extended_property": "any string",
+    }
+    assert len(validator.warning_messages) == 1
+    assert len(validator.error_messages) == 0
+
+
+def test_overriding_base_data_not_allowed(overwrite_existing_data):
+    validator = ExtendedInstanceContentValidator()
+    valid_data = validator.validate_data_contexts(overwrite_existing_data)
+    assert valid_data[0] == {
+        "id": "unique_id1",
+        "description": "description for unique_id1",
+        "entityType": "ObjectType3",
+    }
+    assert len(validator.warning_messages) == 0
+    assert len(validator.error_messages) == 1
+
+
+def test_overriding_base_data_in_debug_mode_raises_exception(overwrite_existing_data):
+    # NOTE: set_debug_mode() toggles process-wide state that persists for later tests.
+    GlobalDebugConfig.set_debug_mode()
+    validator = ExtendedInstanceContentValidator()
+    with pytest.raises(BaseInstanceOverwiteException):
+        validator.validate_data_contexts(overwrite_existing_data)
diff --git a/tests/test_custom_exceptions.py b/tests/test_custom_exceptions.py
new file mode 100644
index 0000000..72b9388
--- /dev/null
+++ b/tests/test_custom_exceptions.py
@@ -0,0 +1,43 @@
+import pytest
+from akm_tools.validation.custom_exceptions import (
+    IDConflictException,
+    BaseInstanceOverwiteException,
+    InvalidReferentIDException,
+)
+
+
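+# Each test below raises the exception directly and pins the exact message format
+# that the validators emit when reporting these conditions.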
+def test_IDConflictException():
+    """
+    This error should be raised when more than 2 instances share the same ID
+    """
+    instances = [{"id": 1}, {"id": 1}, {"id": 1}]
+    with pytest.raises(IDConflictException) as excinfo:
+        raise IDConflictException(instances)
+    assert str(excinfo.value) == f"More than 2 instances with same ID ! \n{instances}\n"
+
+
+def test_BaseInstanceOverwiteException():
+    """
+    This error should be raised when an extended instance overwrites properties of a base instance
+    """
+    base_instance = {"id": "data_instance1"}
+    extended_instance = {"id": "data_instance2", "name": "test"}
+    with pytest.raises(BaseInstanceOverwiteException) as excinfo:
+        raise BaseInstanceOverwiteException(base_instance, extended_instance)
+    assert (
+        str(excinfo.value)
+        == f"The extended instace :\n{extended_instance}\nis overwriting properties of base instance\n{base_instance}\n"
+    )
+
+
+def test_InvalidReferentIDException():
+    """
+    This error should be raised when a data instance references an invalid id
+    """
+    instance = {"id1": 1, "isA": {"referentEntityTypeID": "FeatureOfInterestClass", "referentID": "non_existing_id"}}
+    referentID_value = instance["isA"]["referentID"]
+    with pytest.raises(InvalidReferentIDException) as excinfo:
+        raise InvalidReferentIDException(instance, referentID_value)
+    assert str(excinfo.value) == f"The instance :\n{instance}\nis referencing an invalid id : '{referentID_value}'\n"
diff --git a/tests/test_utils.py b/tests/test_utils.py
new file mode 100644
index 0000000..d9f8f53
--- /dev/null
+++ b/tests/test_utils.py
@@ -0,0 +1,42 @@
+import pytest
+import yaml
+import io
+from akm_tools.utils.genutils import YamlCustomDumper
+
+
+@pytest.fixture
+def yaml_data():
+    return [{"a": 1, "b": 2}, {"c": 3, "d": 4}]
+
+
+@pytest.fixture
+def expected_yaml_string():
+    yaml_string = """
+- a: 1
+  b: 2
+
+- c: 3
+  d: 4
+"""
+    return yaml_string.lstrip()
+
+
+def test_YamlCustomDumper_dumps(yaml_data, expected_yaml_string):
+    """
+    This test checks that the dumps method converts a list of dictionaries to a YAML string, with a line break after each instance.
+    """
+    yaml_string = YamlCustomDumper.dumps(yaml_data)
+    assert yaml_string == expected_yaml_string, "The YAML string should have a line break after each instance."
+
+
+def test_YamlCustomDumper_dump(yaml_data, expected_yaml_string):
+    """
+    This test checks that the dump method writes the same formatted YAML string to a file-like object.
+    """
+    file = io.StringIO()
+    YamlCustomDumper.dump(yaml_data, file)
+
+    # Check if the file was written correctly
+    file.seek(0)
+    yaml_string = file.read()
+    assert yaml_string == expected_yaml_string, "The file content should match the expected YAML string."