From 39885e1f0c1f687766fef9f1fd1eac61eeeff692 Mon Sep 17 00:00:00 2001 From: Ishan Date: Fri, 7 Jul 2023 13:55:10 +0530 Subject: [PATCH 01/38] glific connector initial commit --- .../connectors/source-glific/.dockerignore | 6 + .../connectors/source-glific/Dockerfile | 38 +++ .../connectors/source-glific/README.md | 138 ++++++++++ .../source-glific/acceptance-test-config.yml | 39 +++ .../source-glific/acceptance-test-docker.sh | 3 + .../connectors/source-glific/build.gradle | 9 + .../integration_tests/__init__.py | 3 + .../integration_tests/abnormal_state.json | 5 + .../integration_tests/acceptance.py | 16 ++ .../integration_tests/configured_catalog.json | 22 ++ .../integration_tests/invalid_config.json | 3 + .../integration_tests/sample_config.json | 3 + .../integration_tests/sample_state.json | 5 + .../connectors/source-glific/main.py | 13 + .../connectors/source-glific/metadata.yaml | 22 ++ .../connectors/source-glific/requirements.txt | 2 + .../connectors/source-glific/setup.py | 29 ++ .../source-glific/source_glific/__init__.py | 8 + .../source_glific/schemas/TODO.md | 25 ++ .../source_glific/schemas/customers.json | 16 ++ .../source_glific/schemas/employees.json | 19 ++ .../source-glific/source_glific/source.py | 256 ++++++++++++++++++ .../source-glific/source_glific/spec.yaml | 20 ++ .../source-glific/unit_tests/__init__.py | 3 + .../unit_tests/test_incremental_streams.py | 59 ++++ .../source-glific/unit_tests/test_source.py | 22 ++ .../source-glific/unit_tests/test_streams.py | 83 ++++++ 27 files changed, 867 insertions(+) create mode 100644 airbyte-integrations/connectors/source-glific/.dockerignore create mode 100644 airbyte-integrations/connectors/source-glific/Dockerfile create mode 100644 airbyte-integrations/connectors/source-glific/README.md create mode 100644 airbyte-integrations/connectors/source-glific/acceptance-test-config.yml create mode 100755 airbyte-integrations/connectors/source-glific/acceptance-test-docker.sh create mode 100644 airbyte-integrations/connectors/source-glific/build.gradle create mode 100644 airbyte-integrations/connectors/source-glific/integration_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-glific/integration_tests/abnormal_state.json create mode 100644 airbyte-integrations/connectors/source-glific/integration_tests/acceptance.py create mode 100644 airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json create mode 100644 airbyte-integrations/connectors/source-glific/integration_tests/invalid_config.json create mode 100644 airbyte-integrations/connectors/source-glific/integration_tests/sample_config.json create mode 100644 airbyte-integrations/connectors/source-glific/integration_tests/sample_state.json create mode 100644 airbyte-integrations/connectors/source-glific/main.py create mode 100644 airbyte-integrations/connectors/source-glific/metadata.yaml create mode 100644 airbyte-integrations/connectors/source-glific/requirements.txt create mode 100644 airbyte-integrations/connectors/source-glific/setup.py create mode 100644 airbyte-integrations/connectors/source-glific/source_glific/__init__.py create mode 100644 airbyte-integrations/connectors/source-glific/source_glific/schemas/TODO.md create mode 100644 airbyte-integrations/connectors/source-glific/source_glific/schemas/customers.json create mode 100644 airbyte-integrations/connectors/source-glific/source_glific/schemas/employees.json create mode 100644 airbyte-integrations/connectors/source-glific/source_glific/source.py create mode 
100644 airbyte-integrations/connectors/source-glific/source_glific/spec.yaml create mode 100644 airbyte-integrations/connectors/source-glific/unit_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-glific/unit_tests/test_incremental_streams.py create mode 100644 airbyte-integrations/connectors/source-glific/unit_tests/test_source.py create mode 100644 airbyte-integrations/connectors/source-glific/unit_tests/test_streams.py diff --git a/airbyte-integrations/connectors/source-glific/.dockerignore b/airbyte-integrations/connectors/source-glific/.dockerignore new file mode 100644 index 000000000000..b4602cfc138e --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_glific +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-glific/Dockerfile b/airbyte-integrations/connectors/source-glific/Dockerfile new file mode 100644 index 000000000000..d68fafd35a2a --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.13-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_glific ./source_glific + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-glific diff --git a/airbyte-integrations/connectors/source-glific/README.md b/airbyte-integrations/connectors/source-glific/README.md new file mode 100644 index 000000000000..55bd1ccb5caf --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/README.md @@ -0,0 +1,138 @@ +# Glific Source + +This is the repository for the Glific source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/glific). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. 
`requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-glific:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/glific) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_glific/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source glific test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-glific:dev +``` + +If you want to build the Docker image with the CDK on your local machine (rather than the most recent package published to pypi), from the airbyte base directory run: +```bash +CONNECTOR_TAG= CONNECTOR_NAME= sh airbyte-integrations/scripts/build-connector-image-with-local-cdk.sh +``` + + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-glific:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-glific:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-glific:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-glific:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-glific:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). 
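For a sense of what a custom integration test for this connector could look like before the commands below, here is a minimal sketch (the file name and assertions are illustrative, and it assumes `secrets/config.json` exists):

```python
# integration_tests/test_connection.py (illustrative)
import json
import logging

from source_glific import SourceGlific


def test_check_connection_live():
    # Uses the same credentials file as the acceptance tests.
    with open("secrets/config.json", "r", encoding="utf-8") as fp:
        config = json.load(fp)
    ok, error = SourceGlific().check_connection(logging.getLogger("airbyte"), config)
    assert ok, f"connection check failed: {error}"
```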
+#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with Docker, run `./acceptance-test-docker.sh` from the connector root. + +### Using gradle to run tests +All commands should be run from the airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-glific:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-glific:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
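The acceptance test configuration that follows points at `secrets/config.json`. For orientation, a config conforming to `source_glific/spec.yaml` carries the two required fields below (values are placeholders, not real credentials); note that a later commit in this series adds a required `start_time` field as well:

```json
{
  "phone": "911234567890",
  "password": "<glific-account-password>"
}
```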
diff --git a/airbyte-integrations/connectors/source-glific/acceptance-test-config.yml b/airbyte-integrations/connectors/source-glific/acceptance-test-config.yml new file mode 100644 index 000000000000..e19482a81505 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/acceptance-test-config.yml @@ -0,0 +1,39 @@ +# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-glific:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_glific/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file +# expect_records: +# path: "integration_tests/expected_records.jsonl" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state: +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-glific/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-glific/acceptance-test-docker.sh new file mode 100755 index 000000000000..b6d65deeccb4 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/acceptance-test-docker.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env sh + +source "$(git rev-parse --show-toplevel)/airbyte-integrations/bases/connector-acceptance-test/acceptance-test-docker.sh" diff --git a/airbyte-integrations/connectors/source-glific/build.gradle b/airbyte-integrations/connectors/source-glific/build.gradle new file mode 100644 index 000000000000..29fd3fefe607 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-connector-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_glific' +} diff --git a/airbyte-integrations/connectors/source-glific/integration_tests/__init__.py b/airbyte-integrations/connectors/source-glific/integration_tests/__init__.py new file mode 100644 index 000000000000..c941b3045795 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-glific/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-glific/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..52b0f2c2118f --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-glific/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-glific/integration_tests/acceptance.py new file mode 100644 index 000000000000..9e6409236281 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("connector_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..36f0468db0d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json @@ -0,0 +1,22 @@ +{ + "streams": [ + { + "stream": { + "name": "customers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "employees", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + } + ] +} diff --git a/airbyte-integrations/connectors/source-glific/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-glific/integration_tests/invalid_config.json new file mode 100644 index 000000000000..f3732995784f --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "todo-wrong-field": "this should be an incomplete config file, used in standard tests" +} diff --git a/airbyte-integrations/connectors/source-glific/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-glific/integration_tests/sample_config.json new file mode 100644 index 000000000000..ecc4913b84c7 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "fix-me": "TODO" +} diff --git a/airbyte-integrations/connectors/source-glific/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-glific/integration_tests/sample_state.json new file mode 100644 index 000000000000..3587e579822d --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-glific/main.py b/airbyte-integrations/connectors/source-glific/main.py new file mode 100644 index 000000000000..76407bfc1e0b --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights 
reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_glific import SourceGlific + +if __name__ == "__main__": + source = SourceGlific() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-glific/metadata.yaml b/airbyte-integrations/connectors/source-glific/metadata.yaml new file mode 100644 index 000000000000..6706347bc70c --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/metadata.yaml @@ -0,0 +1,22 @@ +data: + allowedHosts: + hosts: + - TODO # Please change to the hostname of the source. + registries: + oss: + enabled: false + connectorSubtype: api + connectorType: source + definitionId: 455d10e4-7d80-491a-93eb-a965d49e6e7b + dockerImageTag: 0.1.0 + dockerRepository: airbyte/source-glific + githubIssueLabel: source-glific + icon: glific.svg + license: MIT + name: Glific + releaseDate: TODO + releaseStage: alpha + documentationUrl: https://docs.airbyte.com/integrations/sources/glific + tags: + - language:python +metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-glific/requirements.txt b/airbyte-integrations/connectors/source-glific/requirements.txt new file mode 100644 index 000000000000..cc57334ef619 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/connector-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-glific/setup.py b/airbyte-integrations/connectors/source-glific/setup.py new file mode 100644 index 000000000000..50fe242637c9 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.2", "sgqlc" +] + +TEST_REQUIREMENTS = [ + "pytest~=6.2", + "pytest-mock~=3.6.1", + "connector-acceptance-test", +] + +setup( + name="source_glific", + description="Source implementation for Glific.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-glific/source_glific/__init__.py b/airbyte-integrations/connectors/source-glific/source_glific/__init__.py new file mode 100644 index 000000000000..fa09759bb937 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/source_glific/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceGlific + +__all__ = ["SourceGlific"] diff --git a/airbyte-integrations/connectors/source-glific/source_glific/schemas/TODO.md b/airbyte-integrations/connectors/source-glific/source_glific/schemas/TODO.md new file mode 100644 index 000000000000..cf1efadb3c9c --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/source_glific/schemas/TODO.md @@ -0,0 +1,25 @@ +# TODO: Define your stream schemas +Your connector must describe the schema of each stream it can output using [JSONSchema](https://json-schema.org). + +The simplest way to do this is to describe the schema of your streams using one `.json` file per stream. You can also dynamically generate the schema of your stream in code, or you can combine both approaches: start with a `.json` file and dynamically add properties to it. 
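As a concrete sketch of the one-file-per-stream approach: for a stream named `messages` (one of the tables this connector ends up syncing), the default lookup described just below would read `schemas/messages.json`; a minimal file could look like this (the field names are illustrative, not the actual Glific columns):

```json
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "type": "object",
  "additionalProperties": true,
  "properties": {
    "id": { "type": ["null", "string"] },
    "inserted_at": { "type": ["null", "string"], "format": "date-time" }
  }
}
```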
+ +The schema of a stream is the return value of `Stream.get_json_schema`. + +## Static schemas +By default, `Stream.get_json_schema` reads a `.json` file in the `schemas/` directory whose name is equal to the value of the `Stream.name` property. In turn `Stream.name` by default returns the name of the class in snake case. Therefore, if you have a class `class EmployeeBenefits(HttpStream)` the default behavior will look for a file called `schemas/employee_benefits.json`. You can override any of these behaviors as you need. + +Important note: any objects referenced via `$ref` should be placed in the `shared/` directory in their own `.json` files. + +## Dynamic schemas +If you'd rather define your schema in code, override `Stream.get_json_schema` in your stream class to return a `dict` describing the schema using [JSONSchema](https://json-schema.org). + +## Dynamically modifying static schemas +Override `Stream.get_json_schema` to run the default behavior, edit the returned value, then return the edited value: +``` +def get_json_schema(self): + schema = super().get_json_schema() + schema['dynamically_determined_property'] = "property" + return schema +``` + +Delete this file once you're done. Or don't. Up to you :) diff --git a/airbyte-integrations/connectors/source-glific/source_glific/schemas/customers.json b/airbyte-integrations/connectors/source-glific/source_glific/schemas/customers.json new file mode 100644 index 000000000000..9a4b13485836 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/source_glific/schemas/customers.json @@ -0,0 +1,16 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "signup_date": { + "type": ["null", "string"], + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-glific/source_glific/schemas/employees.json b/airbyte-integrations/connectors/source-glific/source_glific/schemas/employees.json new file mode 100644 index 000000000000..2fa01a0fa1ff --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/source_glific/schemas/employees.json @@ -0,0 +1,19 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "years_of_service": { + "type": ["null", "integer"] + }, + "start_date": { + "type": ["null", "string"], + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-glific/source_glific/source.py b/airbyte-integrations/connectors/source-glific/source_glific/source.py new file mode 100644 index 000000000000..0b7da33b9a91 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/source_glific/source.py @@ -0,0 +1,256 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from abc import ABC +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple + +import requests +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream +from sgqlc.endpoint.http import HTTPEndpoint + +""" +TODO: Most comments in this class are instructive and should be deleted after the source is implemented. + +This file provides a stubbed example of how to use the Airbyte CDK to develop both a source connector which supports full refresh or and an +incremental syncs from an HTTP API. 
+ +The various TODOs are both implementation hints and steps - fulfilling all the TODOs should be sufficient to implement one basic and one incremental +stream from a source. This pattern is the same one used by Airbyte internally to implement connectors. + +The approach here is not authoritative, and devs are free to use their own judgement. + +There are additional required TODOs in the files within the integration_tests folder and the spec.yaml file. +""" + + +# Basic full refresh stream +class GlificStream(HttpStream, ABC): + """ + TODO remove this comment + + This class represents a stream output by the connector. + This is an abstract base class meant to contain all the common functionality at the API level e.g: the API base URL, pagination strategy, + parsing responses etc.. + + Each stream should extend this class (or another abstract subclass of it) to specify behavior unique to that stream. + + Typically for REST APIs each stream corresponds to a resource in the API. For example if the API + contains the endpoints + - GET v1/customers + - GET v1/employees + + then you should have three classes: + `class GlificStream(HttpStream, ABC)` which is the current class + `class Customers(GlificStream)` contains behavior to pull data for customers using v1/customers + `class Employees(GlificStream)` contains behavior to pull data for employees using v1/employees + + If some streams implement incremental sync, it is typical to create another class + `class IncrementalGlificStream((GlificStream), ABC)` then have concrete stream implementations extend it. An example + is provided below. + + See the reference docs for the full list of configurable options. + """ + + # TODO: Fill in the url base. Required. + url_base = "https://example-api.com/v1/" + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + """ + TODO: Override this method to define a pagination strategy. If you will not be using pagination, no action is required - just return None. + + This method should return a Mapping (e.g: dict) containing whatever information required to make paginated requests. This dict is passed + to most other methods in this class to help you form headers, request bodies, query params, etc.. + + For example, if the API accepts a 'page' parameter to determine which page of the result to return, and a response from the API contains a + 'page' number, then this method should probably return a dict {'page': response.json()['page'] + 1} to increment the page count by 1. + The request_params method should then read the input next_page_token and set the 'page' param to next_page_token['page']. + + :param response: the most recent response from the API + :return If there is another page in the result, a mapping (e.g: dict) containing information needed to query the next page in the response. + If there are no more pages in the result, return None. + """ + return None + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + """ + TODO: Override this method to define any query parameters to be set. Remove this method if you don't need to define request params. + Usually contains common params e.g. pagination size etc. + """ + return {} + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + """ + TODO: Override this method to define how a response is parsed. 
+        :return an iterable containing each record in the response +        """ +        yield {} + + +class Customers(GlificStream): +    """ +    TODO: Change class name to match the table/data source this stream corresponds to. +    """ + +    # TODO: Fill in the primary key. Required. This is usually a unique field in the stream, like an ID or a timestamp. +    primary_key = "customer_id" + +    def path( +        self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None +    ) -> str: +        """ +        TODO: Override this method to define the path this stream corresponds to. E.g. if the url is https://example-api.com/v1/customers then this +        should return "customers". Required. +        """ +        return "customers" + + +# Basic incremental stream +class IncrementalGlificStream(GlificStream, ABC): +    """ +    TODO fill in details of this class to implement functionality related to incremental syncs for your connector. +    if you do not need to implement incremental sync for any streams, remove this class. +    """ + +    # TODO: Fill in to checkpoint stream reads after N records. This prevents re-reading of data if the stream fails for any reason. +    state_checkpoint_interval = None + +    @property +    def cursor_field(self) -> str: +        """ +        TODO +        Override to return the cursor field used by this stream e.g: an API entity might always use created_at as the cursor field. This is +        usually id or date based. This field's presence tells the framework this is an incremental stream. Required for incremental. + +        :return str: The name of the cursor field. +        """ +        return [] + +    def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: +        """ +        Override to determine the latest state after reading the latest record. This typically compares the cursor_field from the latest record and +        the current state and picks the 'most' recent cursor. This is how a stream's state is determined. Required for incremental. +        """ +        return {} + + +class Employees(IncrementalGlificStream): +    """ +    TODO: Change class name to match the table/data source this stream corresponds to. +    """ + +    # TODO: Fill in the cursor_field. Required. +    cursor_field = "start_date" + +    # TODO: Fill in the primary key. Required. This is usually a unique field in the stream, like an ID or a timestamp. +    primary_key = "employee_id" + +    def path(self, **kwargs) -> str: +        """ +        TODO: Override this method to define the path this stream corresponds to. E.g. if the url is https://example-api.com/v1/employees then this should +        return "single". Required. +        """ +        return "employees" + +    def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: +        """ +        TODO: Optionally override this method to define this stream's slices. If slicing is not needed, delete this method. + +        Slices control when state is saved. Specifically, state is saved after a slice has been fully read. +        This is useful if the API offers reads by groups or filters, and can be paired with the state object to make reads efficient. See the "concepts" +        section of the docs for more information. + +        The function is called before reading any records in a stream. It returns an Iterable of dicts, each containing the +        necessary data to craft a request for a slice. The stream state is usually referenced to determine what slices need to be created. +        This means that data in a slice is usually closely related to a stream's cursor_field and stream_state. 
+ +        An HTTP request is made for each returned slice. The same slice can be accessed in the path, request_params and request_header functions to help +        craft that specific request. + +        For example, if https://example-api.com/v1/employees offers a date query params that returns data for that particular day, one way to implement +        this would be to consult the stream state object for the last synced date, then return a slice containing each date from the last synced date +        till now. The request_params function would then grab the date from the stream_slice and make it part of the request by injecting it into +        the date query param. +        """ +        raise NotImplementedError("Implement stream slices or delete this method!") + +# Source +class SourceGlific(AbstractSource): +    API_URL = "https://api.staging.tides.coloredcow.com/api" + + +    def check_connection(self, logger, config) -> Tuple[bool, any]: +        """ +        TODO: Implement a connection check to validate that the user-provided config can be used to connect to the underlying API + +        See https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/source.py#L232 +        for an example. + +        :param config: the user-input config object conforming to the connector's spec.yaml +        :param logger: logger object +        :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise. +        """ +        if 'phone' not in config: +            logger.info('Phone number missing') +            return False, "Phone number missing" + +        if 'password' not in config: +            logger.info("Password missing") +            return False, "Password missing" + +        endpoint = f"{self.API_URL}/v1/session" +        auth_payload = { +            "user": { +                "phone": config["phone"], +                "password": config["password"] +            } +        } + +        response = requests.post(endpoint, json=auth_payload, timeout=30) +        try: +            response.raise_for_status() +        except requests.exceptions.HTTPError as err: +            logger.info(err) +            return False, str(err) + +        return True, None + +    def streams(self, config: Mapping[str, Any]) -> List[Stream]: +        """ +        TODO: Replace the streams below with your own streams. + +        :param config: A Mapping of the user input configuration as defined in the connector spec. +        """ + +        # authenticate and get the credentials for all streams +        endpoint = f"{self.API_URL}/v1/session" +        auth_payload = { +            "user": { +                "phone": config["phone"], +                "password": config["password"] +            } +        } +        try: +            response = requests.post(endpoint, json=auth_payload, timeout=30) +            response.raise_for_status() +        except requests.exceptions.HTTPError: +            # return empty zero streams since authentication failed +            return [] + +        # fetch the export config for streams +        endpoint = "{self.API_URL}" +        headers = {'Authorization': 'bearer TOKEN'} + +        query = 'query { ... 
}' +        variables = {'varName': 'value'} + +        endpoint = HTTPEndpoint(endpoint, headers) +        data = endpoint(query, variables) + + +        return [] diff --git a/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml b/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml new file mode 100644 index 000000000000..83de8d98e8ed --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml @@ -0,0 +1,20 @@ +documentationUrl: https://docsurl.com +connectionSpecification: +  $schema: http://json-schema.org/draft-07/schema# +  title: Glific Spec +  type: object +  required: +    - phone +    - password +  properties: +    phone: +      type: string +      title: Username +      description: Phone (12 digit) to authenticate into your Glific account +      order: 0 +    password: +      type: string +      title: Password +      description: Password to authenticate into your Glific account +      airbyte_secret: true +      order: 1 diff --git a/airbyte-integrations/connectors/source-glific/unit_tests/__init__.py b/airbyte-integrations/connectors/source-glific/unit_tests/__init__.py new file mode 100644 index 000000000000..c941b3045795 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-glific/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-glific/unit_tests/test_incremental_streams.py new file mode 100644 index 000000000000..89f63820d2f2 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/unit_tests/test_incremental_streams.py @@ -0,0 +1,59 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from airbyte_cdk.models import SyncMode +from pytest import fixture +from source_glific.source import IncrementalGlificStream + + +@fixture +def patch_incremental_base_class(mocker): +    # Mock abstract methods to enable instantiating abstract class +    mocker.patch.object(IncrementalGlificStream, "path", "v0/example_endpoint") +    mocker.patch.object(IncrementalGlificStream, "primary_key", "test_primary_key") +    mocker.patch.object(IncrementalGlificStream, "__abstractmethods__", set()) + + +def test_cursor_field(patch_incremental_base_class): +    stream = IncrementalGlificStream() +    # TODO: replace this with your expected cursor field +    expected_cursor_field = [] +    assert stream.cursor_field == expected_cursor_field + + +def test_get_updated_state(patch_incremental_base_class): +    stream = IncrementalGlificStream() +    # TODO: replace this with your input parameters +    inputs = {"current_stream_state": None, "latest_record": None} +    # TODO: replace this with your expected updated stream state +    expected_state = {} +    assert stream.get_updated_state(**inputs) == expected_state + + +def test_stream_slices(patch_incremental_base_class): +    stream = IncrementalGlificStream() +    # TODO: replace this with your input parameters +    inputs = {"sync_mode": SyncMode.incremental, "cursor_field": [], "stream_state": {}} +    # TODO: replace this with your expected stream slices list +    expected_stream_slice = [None] +    assert stream.stream_slices(**inputs) == expected_stream_slice + + +def test_supports_incremental(patch_incremental_base_class, mocker): +    mocker.patch.object(IncrementalGlificStream, "cursor_field", "dummy_field") +    stream = IncrementalGlificStream() +    assert stream.supports_incremental + + +def test_source_defined_cursor(patch_incremental_base_class): +    stream = IncrementalGlificStream() +    assert 
stream.source_defined_cursor + + +def test_stream_checkpoint_interval(patch_incremental_base_class): + stream = IncrementalGlificStream() + # TODO: replace this with your expected checkpoint interval + expected_checkpoint_interval = None + assert stream.state_checkpoint_interval == expected_checkpoint_interval diff --git a/airbyte-integrations/connectors/source-glific/unit_tests/test_source.py b/airbyte-integrations/connectors/source-glific/unit_tests/test_source.py new file mode 100644 index 000000000000..b17747b06fb9 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/unit_tests/test_source.py @@ -0,0 +1,22 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import MagicMock + +from source_glific.source import SourceGlific + + +def test_check_connection(mocker): + source = SourceGlific() + logger_mock, config_mock = MagicMock(), MagicMock() + assert source.check_connection(logger_mock, config_mock) == (True, None) + + +def test_streams(mocker): + source = SourceGlific() + config_mock = MagicMock() + streams = source.streams(config_mock) + # TODO: replace this with your streams number + expected_streams_number = 2 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-glific/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-glific/unit_tests/test_streams.py new file mode 100644 index 000000000000..a580480ec160 --- /dev/null +++ b/airbyte-integrations/connectors/source-glific/unit_tests/test_streams.py @@ -0,0 +1,83 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from source_glific.source import GlificStream + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(GlificStream, "path", "v0/example_endpoint") + mocker.patch.object(GlificStream, "primary_key", "test_primary_key") + mocker.patch.object(GlificStream, "__abstractmethods__", set()) + + +def test_request_params(patch_base_class): + stream = GlificStream() + # TODO: replace this with your input parameters + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + # TODO: replace this with your expected request parameters + expected_params = {} + assert stream.request_params(**inputs) == expected_params + + +def test_next_page_token(patch_base_class): + stream = GlificStream() + # TODO: replace this with your input parameters + inputs = {"response": MagicMock()} + # TODO: replace this with your expected next page token + expected_token = None + assert stream.next_page_token(**inputs) == expected_token + + +def test_parse_response(patch_base_class): + stream = GlificStream() + # TODO: replace this with your input parameters + inputs = {"response": MagicMock()} + # TODO: replace this with your expected parced object + expected_parsed_object = {} + assert next(stream.parse_response(**inputs)) == expected_parsed_object + + +def test_request_headers(patch_base_class): + stream = GlificStream() + # TODO: replace this with your input parameters + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + # TODO: replace this with your expected request headers + expected_headers = {} + assert stream.request_headers(**inputs) == expected_headers + + +def test_http_method(patch_base_class): + stream = GlificStream() + # TODO: replace this with your expected http request method + expected_method = 
"GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = GlificStream() + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = GlificStream() + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time From 8308605aed06012e277a9f7e0b2f7a3dd7849c05 Mon Sep 17 00:00:00 2001 From: Ishan Date: Mon, 10 Jul 2023 17:46:35 +0530 Subject: [PATCH 02/38] glific WIP --- .../source-glific/source_glific/source.py | 56 +++++++++++++++---- 1 file changed, 45 insertions(+), 11 deletions(-) diff --git a/airbyte-integrations/connectors/source-glific/source_glific/source.py b/airbyte-integrations/connectors/source-glific/source_glific/source.py index 0b7da33b9a91..02fc933b1242 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/source.py +++ b/airbyte-integrations/connectors/source-glific/source_glific/source.py @@ -4,12 +4,15 @@ from abc import ABC -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Union +from airbyte_cdk.sources.streams.http.auth.core import HttpAuthenticator import requests +import json from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http import HttpStream +from requests.auth import AuthBase from sgqlc.endpoint.http import HTTPEndpoint """ @@ -55,8 +58,20 @@ class GlificStream(HttpStream, ABC): See the reference docs for the full list of configurable options. """ - # TODO: Fill in the url base. Required. - url_base = "https://example-api.com/v1/" + def __init__(self, stream_name: str, url_base: str, credentials: Mapping[str: str], **kwargs): + super().__init__(**kwargs) + + self.stream_name = stream_name + self.url_base = url_base + self.credentials = credentials + + @property + def url_base(self) -> str: + return self.url_base + + @property + def name(self) -> str: + return self.stream_name def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: """ @@ -74,6 +89,10 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, If there are no more pages in the result, return None. """ return None + + def request_headers(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> Mapping[str, Any]: + """Add the authorization token in the headers""" + return {'authorization': self.credentials['access_token'], 'Content-Type': 'application/json'} def request_params( self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None @@ -83,6 +102,10 @@ def request_params( Usually contains common params e.g. pagination size etc. 
""" return {} + + def request_body_json(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> Mapping | None: + + return {} def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: """ @@ -238,19 +261,30 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: try: response = requests.post(endpoint, json=auth_payload, timeout=30) response.raise_for_status() + credentials = response['data'] except requests.exceptions.HTTPError: # return empty zero streams since authentication failed return [] - # fetch the export config for streams - endpoint = "{self.API_URL}" - headers = {'Authorization': 'bearer TOKEN'} + # fetch the export config for organization/client/user + endpoint = f"{self.API_URL}" + headers = {'authorization': credentials['access_token']} - query = 'query { ... }' - variables = {'varName': 'value'} + try: + query = 'query organizationExportConfig { organizationExportConfig { data errors { key message } } }' + variables = {} - endpoint = HTTPEndpoint(endpoint, headers) - data = endpoint(query, variables) + endpoint = HTTPEndpoint(endpoint, headers) + data = endpoint(query, variables) + except requests.exceptions.HTTPError: + # return empty zero streams since config could not be fetched + return [] + # construct streams + config = json.loads(data['data']['organizationExportConfig']['data']) + streams = [] + for table in config['tables']: + stream_obj = GlificStream(table, self.API_URL, credentials) + streams.append(stream_obj) - return [] + return streams From b3560b3f31b9d783d9090286d5a9dfd139f3df0b Mon Sep 17 00:00:00 2001 From: Ishan Date: Thu, 13 Jul 2023 15:08:35 +0530 Subject: [PATCH 03/38] glific full refresh --- .../source-glific/source_glific/source.py | 174 +++++++----------- .../source-glific/source_glific/spec.yaml | 6 + 2 files changed, 77 insertions(+), 103 deletions(-) diff --git a/airbyte-integrations/connectors/source-glific/source_glific/source.py b/airbyte-integrations/connectors/source-glific/source_glific/source.py index 02fc933b1242..5b907b17da5f 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/source.py +++ b/airbyte-integrations/connectors/source-glific/source_glific/source.py @@ -32,9 +32,10 @@ # Basic full refresh stream class GlificStream(HttpStream, ABC): - """ - TODO remove this comment + primary_key = None + + """ This class represents a stream output by the connector. This is an abstract base class meant to contain all the common functionality at the API level e.g: the API base URL, pagination strategy, parsing responses etc.. @@ -58,24 +59,34 @@ class GlificStream(HttpStream, ABC): See the reference docs for the full list of configurable options. 
""" - def __init__(self, stream_name: str, url_base: str, credentials: Mapping[str: str], **kwargs): + def __init__(self, stream_name: str, url_base: str, pagination_limit: int, credentials: dict, **kwargs): super().__init__(**kwargs) self.stream_name = stream_name - self.url_base = url_base + self.api_url = url_base self.credentials = credentials + self.pagination_limit = pagination_limit + self.offset = 0 @property def url_base(self) -> str: - return self.url_base + return self.api_url @property def name(self) -> str: return self.stream_name + + @property + def http_method(self) -> str: + """All requests in the glific stream are posts with body""" + return "POST" + + def path(self, *, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> str: + return "" def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: """ - TODO: Override this method to define a pagination strategy. If you will not be using pagination, no action is required - just return None. + Override this method to define a pagination strategy. If you will not be using pagination, no action is required - just return None. This method should return a Mapping (e.g: dict) containing whatever information required to make paginated requests. This dict is passed to most other methods in this class to help you form headers, request bodies, query params, etc.. @@ -88,8 +99,19 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, :return If there is another page in the result, a mapping (e.g: dict) containing information needed to query the next page in the response. If there are no more pages in the result, return None. """ + + json_resp = response.json() + records_str = json_resp['data']['organizationExportData']['data'] + records_obj = json.loads(records_str) + if self.stream_name in records_obj['data']: + records = json.loads(records_str)['data'][f'{self.stream_name}'] + # more records need to be fetched + if len(records) == (self.pagination_limit + 1): + self.offset += 1 + return {"offset": self.offset, "limit": self.pagination_limit} + return None - + def request_headers(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> Mapping[str, Any]: """Add the authorization token in the headers""" return {'authorization': self.credentials['access_token'], 'Content-Type': 'application/json'} @@ -98,118 +120,66 @@ def request_params( self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None ) -> MutableMapping[str, Any]: """ - TODO: Override this method to define any query parameters to be set. Remove this method if you don't need to define request params. + Override this method to define any query parameters to be set. Remove this method if you don't need to define request params. Usually contains common params e.g. pagination size etc. 
""" return {} def request_body_json(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> Mapping | None: + """Request body to post""" + + query = "query organizationExportData($filter: ExportFilter) {\n \ + organizationExportData(filter: $filter) {\n \ + data \n \ + errors { \n \ + key \n \ + message \n \ + } \n \ + } \n \ + } \n" - return {} + filter_obj = { + "startTime": "2023-01-26T11:11:11Z", + "endTime": "2023-07-04T13:13:13Z", # TODO: need to remove this once its made optional in the API + "offset": self.offset, + "limit": self.pagination_limit, + "tables": [self.stream_name] + } + + if next_page_token is not None: + filter_obj["offset"] = next_page_token["offset"] + filter_obj["limit"] = next_page_token["limit"] + + return {"query": query, "varaiables": {"filter": filter_obj}} def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: """ TODO: Override this method to define how a response is parsed. :return an iterable containing each record in the response """ - yield {} - - -class Customers(GlificStream): - """ - TODO: Change class name to match the table/data source this stream corresponds to. - """ - - # TODO: Fill in the primary key. Required. This is usually a unique field in the stream, like an ID or a timestamp. - primary_key = "customer_id" - - def path( - self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> str: - """ - TODO: Override this method to define the path this stream corresponds to. E.g. if the url is https://example-api.com/v1/customers then this - should return "customers". Required. - """ - return "customers" - - -# Basic incremental stream -class IncrementalGlificStream(GlificStream, ABC): - """ - TODO fill in details of this class to implement functionality related to incremental syncs for your connector. - if you do not need to implement incremental sync for any streams, remove this class. - """ - - # TODO: Fill in to checkpoint stream reads after N records. This prevents re-reading of data if the stream fails for any reason. - state_checkpoint_interval = None - - @property - def cursor_field(self) -> str: - """ - TODO - Override to return the cursor field used by this stream e.g: an API entity might always use created_at as the cursor field. This is - usually id or date based. This field's presence tells the framework this in an incremental stream. Required for incremental. - - :return str: The name of the cursor field. - """ - return [] - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Override to determine the latest state after reading the latest record. This typically compared the cursor_field from the latest record and - the current state and picks the 'most' recent cursor. This is how a stream's state is determined. Required for incremental. - """ - return {} - - -class Employees(IncrementalGlificStream): - """ - TODO: Change class name to match the table/data source this stream corresponds to. - """ - - # TODO: Fill in the cursor_field. Required. - cursor_field = "start_date" - - # TODO: Fill in the primary key. Required. This is usually a unique field in the stream, like an ID or a timestamp. - primary_key = "employee_id" - - def path(self, **kwargs) -> str: - """ - TODO: Override this method to define the path this stream corresponds to. E.g. 
if the url is https://example-api.com/v1/employees then this should - return "single". Required. - """ - return "employees" - - def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]: - """ - TODO: Optionally override this method to define this stream's slices. If slicing is not needed, delete this method. - - Slices control when state is saved. Specifically, state is saved after a slice has been fully read. - This is useful if the API offers reads by groups or filters, and can be paired with the state object to make reads efficient. See the "concepts" - section of the docs for more information. - - The function is called before reading any records in a stream. It returns an Iterable of dicts, each containing the - necessary data to craft a request for a slice. The stream state is usually referenced to determine what slices need to be created. - This means that data in a slice is usually closely related to a stream's cursor_field and stream_state. - - An HTTP request is made for each returned slice. The same slice can be accessed in the path, request_params and request_header functions to help - craft that specific request. - - For example, if https://example-api.com/v1/employees offers a date query params that returns data for that particular day, one way to implement - this would be to consult the stream state object for the last synced date, then return a slice containing each date from the last synced date - till now. The request_params function would then grab the date from the stream_slice and make it part of the request by injecting it into - the date query param. - """ - raise NotImplementedError("Implement stream slices or delete this method!") + json_resp = response.json() + records_str = json_resp['data']['organizationExportData']['data'] + records_obj = json.loads(records_str) + if self.stream_name in records_obj['data']: + records = json.loads(records_str)['data'][f'{self.stream_name}'] + col_names = records[0].split(',') + for i in range(1, len(records)): # each record + record = {} + for j, col_val in enumerate(records[i].split(',')): # each col_val + record[col_names[j]] = col_val + yield record # Source class SourceGlific(AbstractSource): + """Glific source""" + API_URL = "https://api.staging.tides.coloredcow.com/api" + PAGINATION_LIMIT = 500 def check_connection(self, logger, config) -> Tuple[bool, any]: """ - TODO: Implement a connection check to validate that the user-provided config can be used to connect to the underlying API + Implement a connection check to validate that the user-provided config can be used to connect to the underlying API See https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/source.py#L232 for an example. @@ -245,8 +215,6 @@ def check_connection(self, logger, config) -> Tuple[bool, any]: def streams(self, config: Mapping[str, Any]) -> List[Stream]: """ - TODO: Replace the streams below with your own streams. - :param config: A Mapping of the user input configuration as defined in the connector spec. 
""" @@ -261,7 +229,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: try: response = requests.post(endpoint, json=auth_payload, timeout=30) response.raise_for_status() - credentials = response['data'] + credentials = response.json()['data'] except requests.exceptions.HTTPError: # return empty zero streams since authentication failed return [] @@ -284,7 +252,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: config = json.loads(data['data']['organizationExportConfig']['data']) streams = [] for table in config['tables']: - stream_obj = GlificStream(table, self.API_URL, credentials) + stream_obj = GlificStream(table, self.API_URL, self.PAGINATION_LIMIT, credentials) streams.append(stream_obj) return streams diff --git a/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml b/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml index 83de8d98e8ed..96f48084dcfa 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml +++ b/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml @@ -6,6 +6,7 @@ connectionSpecification: required: - phone - password + - start_time properties: phone: type: string @@ -18,3 +19,8 @@ connectionSpecification: description: Password to authenticate into the your glific account airbyte_secret: true order: 1 + start_time: + type: string + title: Start Time + description: Start Time from which to pull the data + order: 2 From 33e0334758d63a4c9835bbc4e88ff91cb273fb02 Mon Sep 17 00:00:00 2001 From: Ishan Date: Mon, 17 Jul 2023 11:14:47 +0530 Subject: [PATCH 04/38] updated configured catalog for testing --- .../integration_tests/configured_catalog.json | 31 +++++++------------ 1 file changed, 12 insertions(+), 19 deletions(-) diff --git a/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json index 36f0468db0d8..64573be77e9a 100644 --- a/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json @@ -1,22 +1,15 @@ { "streams": [ - { - "stream": { - "name": "customers", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "employees", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"] - }, - "sync_mode": "incremental", - "destination_sync_mode": "append" - } + { + "stream": { + "name": "messages", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["id"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } ] -} +} \ No newline at end of file From 2b9185d6146d05bf12a270bc6c3e2414710edf80 Mon Sep 17 00:00:00 2001 From: Ishan Date: Mon, 17 Jul 2023 11:19:34 +0530 Subject: [PATCH 05/38] added start time and fixed key errors while parsing data --- .../connectors/source-glific/setup.py | 2 +- .../source-glific/source_glific/source.py | 107 ++++++++---------- .../source-glific/source_glific/spec.yaml | 2 + 3 files changed, 52 insertions(+), 59 deletions(-) diff --git a/airbyte-integrations/connectors/source-glific/setup.py b/airbyte-integrations/connectors/source-glific/setup.py index 50fe242637c9..8da812411f48 100644 --- a/airbyte-integrations/connectors/source-glific/setup.py +++ 
b/airbyte-integrations/connectors/source-glific/setup.py @@ -6,7 +6,7 @@ from setuptools import find_packages, setup MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.2", "sgqlc" + "airbyte-cdk~=0.2" ] TEST_REQUIREMENTS = [ diff --git a/airbyte-integrations/connectors/source-glific/source_glific/source.py b/airbyte-integrations/connectors/source-glific/source_glific/source.py index 5b907b17da5f..3028b7ce5365 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/source.py +++ b/airbyte-integrations/connectors/source-glific/source_glific/source.py @@ -12,23 +12,22 @@ from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http import HttpStream -from requests.auth import AuthBase from sgqlc.endpoint.http import HTTPEndpoint -""" -TODO: Most comments in this class are instructive and should be deleted after the source is implemented. -This file provides a stubbed example of how to use the Airbyte CDK to develop both a source connector which supports full refresh or and an -incremental syncs from an HTTP API. - -The various TODOs are both implementation hints and steps - fulfilling all the TODOs should be sufficient to implement one basic and one incremental -stream from a source. This pattern is the same one used by Airbyte internally to implement connectors. - -The approach here is not authoritative, and devs are free to use their own judgement. - -There are additional required TODOs in the files within the integration_tests folder and the spec.yaml file. -""" +stream_json_schema = { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": True, + "properties": { + "id": { + "type": [ + "number", + ] + } + }, +} # Basic full refresh stream class GlificStream(HttpStream, ABC): @@ -59,13 +58,14 @@ class GlificStream(HttpStream, ABC): See the reference docs for the full list of configurable options. 
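This patch is largely about parsing: as the `parse_response` hunk further below shows, each exported table arrives as a list of comma-joined strings whose first element is the header row, and the patch splits rows on bare commas. A sketch of a more defensive variant using the `csv` module, on the assumption that the export quotes values that themselves contain commas:

```python
# Sketch only: a quote-aware parser for the header+rows payload the export
# API returns. Whether Glific quotes embedded commas is an assumption here;
# the patch itself splits on "," directly.
import csv
import io

def parse_rows(rows: list[str]):
    """Turn ['colA,colB', '1,x', ...] into dicts keyed by column name."""
    if not rows:
        return
    header = next(csv.reader(io.StringIO(rows[0])))
    for line in rows[1:]:
        values = next(csv.reader(io.StringIO(line)))
        yield dict(zip(header, values))

print(list(parse_rows(['id,name', '1,"Doe, Jane"', '2,Raj'])))
# -> [{'id': '1', 'name': 'Doe, Jane'}, {'id': '2', 'name': 'Raj'}]
```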
""" - def __init__(self, stream_name: str, url_base: str, pagination_limit: int, credentials: dict, **kwargs): + def __init__(self, stream_name: str, url_base: str, pagination_limit: int, credentials: dict, config: dict, **kwargs): super().__init__(**kwargs) self.stream_name = stream_name self.api_url = url_base self.credentials = credentials self.pagination_limit = pagination_limit + self.start_time = config['start_time'] self.offset = 0 @property @@ -81,6 +81,10 @@ def http_method(self) -> str: """All requests in the glific stream are posts with body""" return "POST" + def get_json_schema(self) -> dict: + """Return json schema of each stream""" + return stream_json_schema + def path(self, *, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> str: return "" @@ -101,14 +105,15 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, """ json_resp = response.json() - records_str = json_resp['data']['organizationExportData']['data'] - records_obj = json.loads(records_str) - if self.stream_name in records_obj['data']: - records = json.loads(records_str)['data'][f'{self.stream_name}'] - # more records need to be fetched - if len(records) == (self.pagination_limit + 1): - self.offset += 1 - return {"offset": self.offset, "limit": self.pagination_limit} + if json_resp['data']['organizationExportData'] is not None: + records_str = json_resp['data']['organizationExportData']['data'] + records_obj = json.loads(records_str) + if self.stream_name in records_obj['data']: + records = json.loads(records_str)['data'][f'{self.stream_name}'] + # more records need to be fetched + if len(records) == (self.pagination_limit + 1): + self.offset += 1 + return {"offset": self.offset, "limit": self.pagination_limit} return None @@ -116,31 +121,14 @@ def request_headers(self, stream_state: Mapping[str, Any], stream_slice: Mapping """Add the authorization token in the headers""" return {'authorization': self.credentials['access_token'], 'Content-Type': 'application/json'} - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None - ) -> MutableMapping[str, Any]: - """ - Override this method to define any query parameters to be set. Remove this method if you don't need to define request params. - Usually contains common params e.g. pagination size etc. 
- """ - return {} - def request_body_json(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> Mapping | None: + def request_body_json(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> Mapping: """Request body to post""" - query = "query organizationExportData($filter: ExportFilter) {\n \ - organizationExportData(filter: $filter) {\n \ - data \n \ - errors { \n \ - key \n \ - message \n \ - } \n \ - } \n \ - } \n" + query = "query organizationExportData($filter: ExportFilter) { organizationExportData(filter: $filter) {data errors { key message } } }" filter_obj = { - "startTime": "2023-01-26T11:11:11Z", - "endTime": "2023-07-04T13:13:13Z", # TODO: need to remove this once its made optional in the API + "startTime": self.start_time, "offset": self.offset, "limit": self.pagination_limit, "tables": [self.stream_name] @@ -150,24 +138,27 @@ def request_body_json(self, stream_state: Mapping[str, Any], stream_slice: Mappi filter_obj["offset"] = next_page_token["offset"] filter_obj["limit"] = next_page_token["limit"] - return {"query": query, "varaiables": {"filter": filter_obj}} + return {"query": query, "variables": {"filter": filter_obj}} def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: """ - TODO: Override this method to define how a response is parsed. + Override this method to define how a response is parsed. :return an iterable containing each record in the response """ json_resp = response.json() - records_str = json_resp['data']['organizationExportData']['data'] - records_obj = json.loads(records_str) - if self.stream_name in records_obj['data']: - records = json.loads(records_str)['data'][f'{self.stream_name}'] - col_names = records[0].split(',') - for i in range(1, len(records)): # each record - record = {} - for j, col_val in enumerate(records[i].split(',')): # each col_val - record[col_names[j]] = col_val - yield record + if json_resp['data']['organizationExportData'] is not None: + records_str = json_resp['data']['organizationExportData']['data'] + records_obj = json.loads(records_str) + if self.stream_name in records_obj['data']: + records = json.loads(records_str)['data'][f'{self.stream_name}'] + col_names = records[0].split(',') + print("NOOO OFFF COOLLLSS", len(col_names)) + for i in range(1, len(records)): # each record + record = {} + print("RECORD NOO OOFFF VALLLSS",i, len(records[i].split(','))) + for j, col_val in enumerate(records[i].split(',')): # each col_val + record[col_names[j]] = col_val + yield record # Source class SourceGlific(AbstractSource): @@ -247,12 +238,12 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: except requests.exceptions.HTTPError: # return empty zero streams since config could not be fetched return [] - + # construct streams - config = json.loads(data['data']['organizationExportConfig']['data']) + export_config = json.loads(data['data']['organizationExportConfig']['data']) streams = [] - for table in config['tables']: - stream_obj = GlificStream(table, self.API_URL, self.PAGINATION_LIMIT, credentials) + for table in export_config['tables']: + stream_obj = GlificStream(table, self.API_URL, self.PAGINATION_LIMIT, credentials, config) streams.append(stream_obj) return streams diff --git a/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml b/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml index 
96f48084dcfa..2e649192c6f2 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml +++ b/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml @@ -24,3 +24,5 @@ connectionSpecification: title: Start Time description: Start Time from which to pull the data order: 2 + default: "2023-01-26T11:11:11Z" + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$ From 65890a86b1a2e139e12f4ffe123973f4a92e47b2 Mon Sep 17 00:00:00 2001 From: Ishan Date: Mon, 17 Jul 2023 11:38:58 +0530 Subject: [PATCH 06/38] graphQL client not needed --- .../connectors/source-glific/source_glific/source.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/airbyte-integrations/connectors/source-glific/source_glific/source.py b/airbyte-integrations/connectors/source-glific/source_glific/source.py index 3028b7ce5365..b76e989ab79b 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/source.py +++ b/airbyte-integrations/connectors/source-glific/source_glific/source.py @@ -12,7 +12,6 @@ from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http import HttpStream -from sgqlc.endpoint.http import HTTPEndpoint @@ -232,9 +231,11 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: try: query = 'query organizationExportConfig { organizationExportConfig { data errors { key message } } }' variables = {} + payload = {"query": query, "variables": variables} - endpoint = HTTPEndpoint(endpoint, headers) - data = endpoint(query, variables) + res = requests.post(endpoint, headers=headers, json=payload, timeout=30) + res.raise_for_status() + data = res.json() except requests.exceptions.HTTPError: # return empty zero streams since config could not be fetched return [] From e6867ca1d92c32a3e694e733b4ac05d093d4311a Mon Sep 17 00:00:00 2001 From: Ishan Date: Mon, 17 Jul 2023 12:26:57 +0530 Subject: [PATCH 07/38] minor fixes --- .../integration_tests/configured_catalog.json | 198 +++++++++++++++++- .../source-glific/source_glific/source.py | 6 +- 2 files changed, 190 insertions(+), 14 deletions(-) diff --git a/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json index 64573be77e9a..1831d0ecb926 100644 --- a/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json @@ -1,15 +1,191 @@ { "streams": [ - { - "stream": { - "name": "messages", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["id"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - } + { + "cursor_field": null, + "destination_sync_mode": "append", + "primary_key": null, + "stream": { + "default_cursor_field": null, + "json_schema": {}, + "name": "contacts", + "namespace": null, + "source_defined_cursor": null, + "source_defined_primary_key": null, + "supported_sync_modes": [ + "full_refresh" + ] + }, + "sync_mode": "full_refresh" + }, + { + "cursor_field": null, + "destination_sync_mode": "append", + "primary_key": null, + "stream": { + "default_cursor_field": null, + "json_schema": {}, + "name": "messages", + "namespace": null, + "source_defined_cursor": null, + "source_defined_primary_key": null, + "supported_sync_modes": [ + "full_refresh" + ] + 
}, + "sync_mode": "full_refresh" + }, + { + "cursor_field": null, + "destination_sync_mode": "append", + "primary_key": null, + "stream": { + "default_cursor_field": null, + "json_schema": {}, + "name": "messages_media", + "namespace": null, + "source_defined_cursor": null, + "source_defined_primary_key": null, + "supported_sync_modes": [ + "full_refresh" + ] + }, + "sync_mode": "full_refresh" + }, + { + "cursor_field": null, + "destination_sync_mode": "append", + "primary_key": null, + "stream": { + "default_cursor_field": null, + "json_schema": {}, + "name": "locations", + "namespace": null, + "source_defined_cursor": null, + "source_defined_primary_key": null, + "supported_sync_modes": [ + "full_refresh" + ] + }, + "sync_mode": "full_refresh" + }, + { + "cursor_field": null, + "destination_sync_mode": "append", + "primary_key": null, + "stream": { + "default_cursor_field": null, + "json_schema": {}, + "name": "flows", + "namespace": null, + "source_defined_cursor": null, + "source_defined_primary_key": null, + "supported_sync_modes": [ + "full_refresh" + ] + }, + "sync_mode": "full_refresh" + }, + { + "cursor_field": null, + "destination_sync_mode": "append", + "primary_key": null, + "stream": { + "default_cursor_field": null, + "json_schema": {}, + "name": "flow_results", + "namespace": null, + "source_defined_cursor": null, + "source_defined_primary_key": null, + "supported_sync_modes": [ + "full_refresh" + ] + }, + "sync_mode": "full_refresh" + }, + { + "cursor_field": null, + "destination_sync_mode": "append", + "primary_key": null, + "stream": { + "default_cursor_field": null, + "json_schema": {}, + "name": "groups", + "namespace": null, + "source_defined_cursor": null, + "source_defined_primary_key": null, + "supported_sync_modes": [ + "full_refresh" + ] + }, + "sync_mode": "full_refresh" + }, + { + "cursor_field": null, + "destination_sync_mode": "append", + "primary_key": null, + "stream": { + "default_cursor_field": null, + "json_schema": {}, + "name": "interactive_templates", + "namespace": null, + "source_defined_cursor": null, + "source_defined_primary_key": null, + "supported_sync_modes": [ + "full_refresh" + ] + }, + "sync_mode": "full_refresh" + }, + { + "cursor_field": null, + "destination_sync_mode": "append", + "primary_key": null, + "stream": { + "default_cursor_field": null, + "json_schema": {}, + "name": "organizations", + "namespace": null, + "source_defined_cursor": null, + "source_defined_primary_key": null, + "supported_sync_modes": [ + "full_refresh" + ] + }, + "sync_mode": "full_refresh" + }, + { + "cursor_field": null, + "destination_sync_mode": "append", + "primary_key": null, + "stream": { + "default_cursor_field": null, + "json_schema": {}, + "name": "organization_data", + "namespace": null, + "source_defined_cursor": null, + "source_defined_primary_key": null, + "supported_sync_modes": [ + "full_refresh" + ] + }, + "sync_mode": "full_refresh" + }, + { + "cursor_field": null, + "destination_sync_mode": "append", + "primary_key": null, + "stream": { + "default_cursor_field": null, + "json_schema": {}, + "name": "profiles", + "namespace": null, + "source_defined_cursor": null, + "source_defined_primary_key": null, + "supported_sync_modes": [ + "full_refresh" + ] + }, + "sync_mode": "full_refresh" + } ] } \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-glific/source_glific/source.py b/airbyte-integrations/connectors/source-glific/source_glific/source.py index b76e989ab79b..543624f42412 100644 --- 
a/airbyte-integrations/connectors/source-glific/source_glific/source.py +++ b/airbyte-integrations/connectors/source-glific/source_glific/source.py @@ -233,9 +233,9 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: variables = {} payload = {"query": query, "variables": variables} - res = requests.post(endpoint, headers=headers, json=payload, timeout=30) - res.raise_for_status() - data = res.json() + response = requests.post(endpoint, headers=headers, json=payload, timeout=30) + response.raise_for_status() + data = response.json() except requests.exceptions.HTTPError: # return empty zero streams since config could not be fetched return [] From 84358b359bd41f9307948a6868da25788e5a27fd Mon Sep 17 00:00:00 2001 From: Aviraj Gour Date: Tue, 1 Aug 2023 16:57:14 +0530 Subject: [PATCH 08/38] Avni Initial Commit --- .../connectors/source-avni/.dockerignore | 6 + .../connectors/source-avni/Dockerfile | 38 ++++ .../connectors/source-avni/README.md | 138 ++++++++++++++ .../source-avni/acceptance-test-config.yml | 31 +++ .../source-avni/acceptance-test-docker.sh | 3 + .../connectors/source-avni/build.gradle | 9 + .../source-avni/integration_tests/__init__.py | 3 + .../integration_tests/abnormal_state.json | 14 ++ .../integration_tests/acceptance.py | 16 ++ .../integration_tests/configured_catalog.json | 60 ++++++ .../integration_tests/invalid_config.json | 5 + .../integration_tests/sample_config.json | 5 + .../integration_tests/sample_state.json | 14 ++ .../connectors/source-avni/main.py | 13 ++ .../connectors/source-avni/metadata.yaml | 19 ++ .../connectors/source-avni/requirements.txt | 2 + .../connectors/source-avni/setup.py | 30 +++ .../source-avni/source_avni/__init__.py | 8 + .../source_avni/schemas/encounters.json | 96 ++++++++++ .../schemas/programEncounters.json | 105 ++++++++++ .../schemas/programEnrolments.json | 92 +++++++++ .../source_avni/schemas/subjects.json | 109 +++++++++++ .../source-avni/source_avni/source.py | 180 ++++++++++++++++++ .../source-avni/source_avni/spec.yaml | 23 +++ .../source-avni/unit_tests/__init__.py | 3 + .../unit_tests/test_incremental_streams.py | 61 ++++++ .../source-avni/unit_tests/test_source.py | 38 ++++ .../source-avni/unit_tests/test_streams.py | 97 ++++++++++ docs/integrations/sources/avni.md | 47 +++++ 29 files changed, 1265 insertions(+) create mode 100644 airbyte-integrations/connectors/source-avni/.dockerignore create mode 100644 airbyte-integrations/connectors/source-avni/Dockerfile create mode 100644 airbyte-integrations/connectors/source-avni/README.md create mode 100644 airbyte-integrations/connectors/source-avni/acceptance-test-config.yml create mode 100755 airbyte-integrations/connectors/source-avni/acceptance-test-docker.sh create mode 100644 airbyte-integrations/connectors/source-avni/build.gradle create mode 100644 airbyte-integrations/connectors/source-avni/integration_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-avni/integration_tests/abnormal_state.json create mode 100644 airbyte-integrations/connectors/source-avni/integration_tests/acceptance.py create mode 100644 airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json create mode 100644 airbyte-integrations/connectors/source-avni/integration_tests/invalid_config.json create mode 100644 airbyte-integrations/connectors/source-avni/integration_tests/sample_config.json create mode 100644 airbyte-integrations/connectors/source-avni/integration_tests/sample_state.json create mode 100644
airbyte-integrations/connectors/source-avni/main.py create mode 100644 airbyte-integrations/connectors/source-avni/metadata.yaml create mode 100644 airbyte-integrations/connectors/source-avni/requirements.txt create mode 100644 airbyte-integrations/connectors/source-avni/setup.py create mode 100644 airbyte-integrations/connectors/source-avni/source_avni/__init__.py create mode 100644 airbyte-integrations/connectors/source-avni/source_avni/schemas/encounters.json create mode 100644 airbyte-integrations/connectors/source-avni/source_avni/schemas/programEncounters.json create mode 100644 airbyte-integrations/connectors/source-avni/source_avni/schemas/programEnrolments.json create mode 100644 airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json create mode 100644 airbyte-integrations/connectors/source-avni/source_avni/source.py create mode 100644 airbyte-integrations/connectors/source-avni/source_avni/spec.yaml create mode 100644 airbyte-integrations/connectors/source-avni/unit_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-avni/unit_tests/test_incremental_streams.py create mode 100644 airbyte-integrations/connectors/source-avni/unit_tests/test_source.py create mode 100644 airbyte-integrations/connectors/source-avni/unit_tests/test_streams.py create mode 100644 docs/integrations/sources/avni.md diff --git a/airbyte-integrations/connectors/source-avni/.dockerignore b/airbyte-integrations/connectors/source-avni/.dockerignore new file mode 100644 index 000000000000..3fcbe3fc3f0b --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_avni +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-avni/Dockerfile b/airbyte-integrations/connectors/source-avni/Dockerfile new file mode 100644 index 000000000000..81bafe51cb7c --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.13-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_avni ./source_avni + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-avni diff --git a/airbyte-integrations/connectors/source-avni/README.md b/airbyte-integrations/connectors/source-avni/README.md new file mode 100644 index 000000000000..b49197bce903 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/README.md @@ -0,0 +1,138 @@ +# Avni Source + +This is the repository for the Avni source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/avni). 
+ +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-avni:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/avni) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_avni/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source avni test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-avni:dev +``` + +If you want to build the Docker image with the CDK on your local machine (rather than the most recent package published to pypi), from the airbyte base directory run: +```bash +CONNECTOR_TAG= CONNECTOR_NAME= sh airbyte-integrations/scripts/build-connector-image-with-local-cdk.sh +``` + + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-avni:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-avni:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-avni:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-avni:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-avni:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-avni:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-avni:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
diff --git a/airbyte-integrations/connectors/source-avni/acceptance-test-config.yml b/airbyte-integrations/connectors/source-avni/acceptance-test-config.yml new file mode 100644 index 000000000000..da3132808563 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/acceptance-test-config.yml @@ -0,0 +1,31 @@ +# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-avni:0.1.0 +acceptance_tests: + spec: + tests: + - spec_path: "source_avni/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + incremental: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state: + future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-avni/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-avni/acceptance-test-docker.sh new file mode 100755 index 000000000000..b6d65deeccb4 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/acceptance-test-docker.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env sh + +source "$(git rev-parse --show-toplevel)/airbyte-integrations/bases/connector-acceptance-test/acceptance-test-docker.sh" diff --git a/airbyte-integrations/connectors/source-avni/build.gradle b/airbyte-integrations/connectors/source-avni/build.gradle new file mode 100644 index 000000000000..8b0332499806 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-connector-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_avni' +} diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/__init__.py b/airbyte-integrations/connectors/source-avni/integration_tests/__init__.py new file mode 100644 index 000000000000..c941b3045795 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-avni/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..25197515a9d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/integration_tests/abnormal_state.json @@ -0,0 +1,14 @@ +{ + "subjects": { + "Last modified at":"2200-06-27T04:18:36.914Z" + }, + "programEnrolments": { + "Last modified at":"2200-06-27T04:18:36.914Z" + }, + "programEncounters": { + "Last modified at":"2200-06-27T04:18:36.914Z" + }, + "encounters": { + "Last modified at":"2200-06-27T04:18:36.914Z" + } +} diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-avni/integration_tests/acceptance.py new file mode 100644 index 000000000000..9e6409236281 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("connector_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..e7e42dcc322f --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json @@ -0,0 +1,60 @@ +{ + "streams": [ + { + "stream": { + "name": "subjects", + "json_schema": {}, + "supported_sync_modes": ["full_refresh","incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["Last modified at"], + "source_defined_primary_key": [["ID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append_dedup", + "cursor_field": ["audit","Last modified at"], + "primary_key": [["ID"]] + }, + { + "stream": { + "name": "programEnrolments", + "json_schema": {}, + "supported_sync_modes": ["full_refresh","incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["Last modified at"], + "source_defined_primary_key": [["ID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append_dedup", + "cursor_field": ["audit","Last modified at"], + "primary_key": [["ID"]] + }, + { + "stream": { + "name": "programEncounters", + "json_schema": {}, + "supported_sync_modes": ["full_refresh","incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["Last modified at"], + "source_defined_primary_key": [["ID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append_dedup", + "cursor_field": ["audit","Last modified at"], + "primary_key": [["ID"]] + }, + { + "stream": { + "name": "encounters", + "json_schema": {}, + "supported_sync_modes": ["full_refresh","incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["Last modified at"], + "source_defined_primary_key": [["ID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append_dedup", + "cursor_field": ["audit","Last modified at"], + "primary_key": [["ID"]] + } + ] +} diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/invalid_config.json 
b/airbyte-integrations/connectors/source-avni/integration_tests/invalid_config.json new file mode 100644 index 000000000000..6ab0009045de --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/integration_tests/invalid_config.json @@ -0,0 +1,5 @@ +{ + "username": "avni", + "password": "test", + "start_date": "2000-06-27T04:18:36.914Z" +} diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-avni/integration_tests/sample_config.json new file mode 100644 index 000000000000..37c2f075deaa --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/integration_tests/sample_config.json @@ -0,0 +1,5 @@ +{ + "username": "Username", + "password": "password", + "start_date": "2000-06-27T04:18:36.914Z" +} diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-avni/integration_tests/sample_state.json new file mode 100644 index 000000000000..7a8c5a6aa518 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/integration_tests/sample_state.json @@ -0,0 +1,14 @@ +{ + "subjects": { + "Last modified at":"2000-06-27T04:18:36.914Z" + }, + "programEnrolments": { + "Last modified at":"2000-06-27T04:18:36.914Z" + }, + "programEncounters": { + "Last modified at":"2000-06-27T04:18:36.914Z" + }, + "encounters": { + "Last modified at":"2000-06-27T04:18:36.914Z" + } +} diff --git a/airbyte-integrations/connectors/source-avni/main.py b/airbyte-integrations/connectors/source-avni/main.py new file mode 100644 index 000000000000..5ab8e86addc5 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_avni import SourceAvni + +if __name__ == "__main__": + source = SourceAvni() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-avni/metadata.yaml b/airbyte-integrations/connectors/source-avni/metadata.yaml new file mode 100644 index 000000000000..8834761d1e3c --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/metadata.yaml @@ -0,0 +1,19 @@ +data: + allowedHosts: + hosts: + - "*" # Please change to the hostname of the source. + registries: + oss: + enabled: false + connectorSubtype: api + connectorType: source + definitionId: a4adf548-9f40-4eb7-958f-9ff322abd481 + dockerImageTag: 0.1.0 + dockerRepository: airbyte/source-avni + githubIssueLabel: source-avni + icon: avni.svg + license: MIT + name: Avni + releaseStage: alpha + supportUrl: https://docs.airbyte.com/integrations/sources/avni +metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-avni/requirements.txt b/airbyte-integrations/connectors/source-avni/requirements.txt new file mode 100644 index 000000000000..cc57334ef619 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/connector-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-avni/setup.py b/airbyte-integrations/connectors/source-avni/setup.py new file mode 100644 index 000000000000..9f3fd1532f27 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/setup.py @@ -0,0 +1,30 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
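`main.py` above is the standard CDK entrypoint. For debugging, it can help to drive the same `launch` call programmatically instead of through the CLI; a small sketch (the config path is a placeholder):

```python
# Sketch only: drive the connector's own entrypoint from Python, e.g. under
# a debugger. Mirrors main.py; the config path below is a placeholder.
from airbyte_cdk.entrypoint import launch
from source_avni import SourceAvni

if __name__ == "__main__":
    launch(SourceAvni(), ["check", "--config", "secrets/config.json"])
```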
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.2", + "boto3==1.18.0", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.2", + "pytest-mock~=3.6.1", + "connector-acceptance-test", +] + +setup( + name="source_avni", + description="Source implementation for Avni.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-avni/source_avni/__init__.py b/airbyte-integrations/connectors/source-avni/source_avni/__init__.py new file mode 100644 index 000000000000..93eb8dbfdf50 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceAvni + +__all__ = ["SourceAvni"] diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/encounters.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/encounters.json new file mode 100644 index 000000000000..92fed7759f76 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/encounters.json @@ -0,0 +1,96 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "ID": { + "type": "string" + }, + "External ID": { + "type": ["null", "string"] + }, + "Voided": { + "type": "boolean" + }, + "Encounter type": { + "type": ["null", "string"] + }, + "Subject ID": { + "type": "string" + }, + "Subject type": { + "type": "string" + }, + "Subject external ID": { + "type": ["null", "string"] + }, + "Encounter date time": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Encounter location": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "X": { + "type": ["null", "number"] + }, + "Y": { + "type": ["null", "number"] + } + } + }, + "Earliest scheduled date": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Max scheduled date": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "observations": { + "type": ["null", "object"], + "additionalProperties": true + }, + "Cancel location": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "X": { + "type": ["null", "number"] + }, + "Y": { + "type": ["null", "number"] + } + } + }, + "Cancel date time": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "cancelObservations": { + "type": ["null", "object"], + "additionalProperties": true + }, + "audit": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "Created at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Last modified at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Created by": { + "type": ["null", "string"] + }, + "Last modified by": { + "type": ["null", "string"] + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/programEncounters.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/programEncounters.json new file mode 100644 index 000000000000..9ed1cecd0619 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/programEncounters.json @@ -0,0 
+1,105 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "ID": { + "type": ["null", "string"] + }, + "External ID": { + "type": ["null", "string"] + }, + "Voided": { + "type": "boolean" + }, + "Subject ID": { + "type": ["null", "string"] + }, + "Subject type": { + "type": ["null", "string"] + }, + "Subject external ID": { + "type": ["null", "string"] + }, + "Enrolment ID": { + "type": ["null", "string"] + }, + "Enrolment external ID": { + "type": ["null", "string"] + }, + "Program": { + "type": ["null", "string"] + }, + "Encounter type": { + "type": ["null", "string"] + }, + "Encounter date time": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Encounter location": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "X": { + "type": ["null", "number"] + }, + "Y": { + "type": ["null", "number"] + } + } + }, + "Earliest scheduled date": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Max scheduled date": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "observations": { + "type": ["null", "object"], + "additionalProperties": true + }, + "Cancel location": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "X": { + "type": ["null", "number"] + }, + "Y": { + "type": ["null", "number"], + "example": 74.7364501 + } + } + }, + "Cancel date time": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "cancelObservations": { + "type": ["null", "object"], + "additionalProperties": true + }, + "audit": { + "type": ["null", "object"], + "properties": { + "Created at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Last modified at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Created by": { + "type": ["null", "string"] + }, + "Last modified by": { + "type": ["null", "string"] + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/programEnrolments.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/programEnrolments.json new file mode 100644 index 000000000000..9b1493442d8d --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/programEnrolments.json @@ -0,0 +1,92 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "ID": { + "type": "string" + }, + "External ID": { + "type": ["null", "string"] + }, + "Voided": { + "type": "boolean" + }, + "Subject ID": { + "type": "string" + }, + "Subject type": { + "type": "string" + }, + "Subject external ID": { + "type": ["null", "string"] + }, + "Program": { + "type": ["null", "string"] + }, + "Enrolment datetime": { + "type": ["null", "string"] + }, + "Enrolment location": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "X": { + "type": ["null", "number"] + }, + "Y": { + "type": ["null", "number"] + } + } + }, + "Exit datetime": { + "type": ["null", "string"] + }, + "Exit location": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "X": { + "type": ["null", "number"] + }, + "Y": { + "type": ["null", "number"] + } + } + }, + "observations": { + "type": ["null", "object"], + "additionalProperties": true + }, + "exitObservations": { + "type": ["null", "object"], + "additionalProperties": true + }, + 
"encounters": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "audit": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "Created at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Last modified at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Created by": { + "type": ["null", "string"] + }, + "Last modified by": { + "type": ["null", "string"] + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json new file mode 100644 index 000000000000..a6f3c2374231 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json @@ -0,0 +1,109 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "ID": { + "type": "string" + }, + "External ID": { + "type": ["null", "string"] + }, + "Voided": { + "type": "boolean" + }, + "Subject type": { + "type": ["null", "string"] + }, + "Registration location": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "X": { + "type": ["null", "number"] + }, + "Y": { + "type": ["null", "number"] + } + } + }, + "Registration date": { + "type": ["null", "string"] + }, + "location": { + "type": ["null", "object"], + "additionalProperties": true + }, + "relatives": { + "type": ["null", "array"], + "items": { + "type": "object", + "additionalProperties": true, + "properties": { + "Voided": { + "type": "boolean" + }, + "Relationship type": { + "type": ["null", "string"] + }, + "Relative ID": { + "type": ["null", "string"] + }, + "Relative external ID": { + "type": ["null", "string"] + }, + "Enter date": { + "type": ["null", "string"] + }, + "Exit date": { + "type": ["null", "string"] + } + } + } + }, + "observations": { + "type": ["null", "object"], + "additionalProperties": true + }, + "encounters": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"], + "format": "uuid" + } + }, + "enrolments": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"], + "format": "uuid" + } + }, + "audit": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "Created at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Last modified at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Created by": { + "type": ["null", "string"] + }, + "Last modified by": { + "type": ["null", "string"] + } + } + }, + "Groups": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + } + } +} diff --git a/airbyte-integrations/connectors/source-avni/source_avni/source.py b/airbyte-integrations/connectors/source-avni/source_avni/source.py new file mode 100644 index 000000000000..e42430e6f174 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/source.py @@ -0,0 +1,180 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +from abc import ABC +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple + +import boto3 +import requests +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import IncrementalMixin, Stream +from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.models import SyncMode + + +class Avni(HttpStream, ABC): + + url_base = "https://app.avniproject.org/api/" + primary_key = "ID" + cursor_value = None + current_page = 0 + last_record = None + + def __init__(self, start_date: str, path, auth_token: str, **kwargs): + super().__init__(**kwargs) + + self.start_date = start_date + self.auth_token = auth_token + self.stream = path + + +class AvniStream(Avni, IncrementalMixin): + + """ + + Implements the different streams of the Avni source + + API docs: https://avni.readme.io/docs/api-guide + API endpoints: https://app.swaggerhub.com/apis-docs/samanvay/avni-external/1.0.0 + """ + def path(self, **kwargs) -> str: + return self.stream + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + + params = {"lastModifiedDateTime": self.state["Last modified at"]} + if next_page_token: + params.update(next_page_token) + return params + + @property + def name(self) -> str: + return self.stream + + def request_headers( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + + return {"auth-token": self.auth_token} + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + + data = response.json()["content"] + if data: + self.last_record = data[-1] + + yield from data + + def update_state(self) -> None: + + if self.last_record: + updated_last_date = self.last_record["audit"]["Last modified at"] + if updated_last_date > self.state[self.cursor_field[1]]: + self.state = {self.cursor_field[1]: updated_last_date} + self.last_record = None + return None + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + + total_elements = int(response.json()["totalElements"]) + page_size = int(response.json()["pageSize"]) + + if total_elements == page_size: + self.current_page = self.current_page + 1 + return {"page": self.current_page} + + self.update_state() + + self.current_page = 0 + + return None + + state_checkpoint_interval = None + + @property + def cursor_field(self) -> List[str]: + return ["audit", "Last modified at"] + + @property + def state(self) -> Mapping[str, Any]: + + if self.cursor_value: + return {self.cursor_field[1]: self.cursor_value} + else: + return {self.cursor_field[1]: self.start_date} + + @state.setter + def state(self, value: Mapping[str, Any]): + self.cursor_value = value[self.cursor_field[1]] + self._state = value + + +class SourceAvni(AbstractSource): + + + def get_client_id(self): + + url_client = "https://app.avniproject.org/idp-details" + response = requests.get(url_client) + response.raise_for_status() + client = response.json() + return client["cognito"]["clientId"] + + def get_token(self, username: str, password: str, app_client_id: str) -> str: + + client = boto3.client("cognito-idp", region_name="ap-south-1") + response = client.initiate_auth( + ClientId=app_client_id, AuthFlow="USER_PASSWORD_AUTH", AuthParameters={"USERNAME": username, "PASSWORD": password} + ) + return response["AuthenticationResult"]["IdToken"] + + + def 
check_connection(self, logger, config) -> Tuple[bool, any]: + + username = config["username"] + password = config["password"] + + try: + client_id = self.get_client_id() + except Exception as error: + return False, str(error) + ": Please connect with the Avni team" + + try: + + auth_token = self.get_token(username, password, client_id) + stream_kwargs = {"auth_token": auth_token, "start_date": config["start_date"]} + stream = AvniStream(path="subjects", **stream_kwargs).read_records(SyncMode.full_refresh) + return True, None + + except Exception as error: + return False, error + + def generate_streams(self, config: Mapping[str, Any]) -> List[Stream]: + + streams = [] + username = config["username"] + password = config["password"] + + try: + client_id = self.get_client_id() + except Exception as error: + print(str(error) + ": Please connect with the Avni team") + raise error + + auth_token = self.get_token(username, password, client_id) + + endpoints = ["subjects","programEnrolments","programEncounters","encounters"] + for endpoint in endpoints: + stream_kwargs = {"auth_token": auth_token, "start_date": config["start_date"]} + stream = AvniStream(path=endpoint, **stream_kwargs) + streams.append(stream) + + return streams + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + + streams = self.generate_streams(config=config) + return streams \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-avni/source_avni/spec.yaml b/airbyte-integrations/connectors/source-avni/source_avni/spec.yaml new file mode 100644 index 000000000000..573ff4d87f35 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/spec.yaml @@ -0,0 +1,23 @@ +documentationUrl: https://docsurl.com +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Avni Spec + type: object + required: + - username + - password + - start_date + properties: + username: + type: string + description: Your Avni platform username + password: + type: string + description: Your Avni platform password + airbyte_secret: true + start_date: + type: string + default: "2000-06-23T01:30:00.000Z" + description: Specify the date and time from which you want to fetch data + examples: + - "2000-10-31T01:30:00.000Z" \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-avni/unit_tests/__init__.py b/airbyte-integrations/connectors/source-avni/unit_tests/__init__.py new file mode 100644 index 000000000000..c941b3045795 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-avni/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-avni/unit_tests/test_incremental_streams.py new file mode 100644 index 000000000000..ce76621900b2 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/unit_tests/test_incremental_streams.py @@ -0,0 +1,61 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
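`check_connection` and `generate_streams` above share a two-step login. As a standalone sketch: fetch the Cognito app client id from the public `idp-details` endpoint, then exchange the username and password for an `IdToken` with boto3. Region and URLs are the ones hard-coded in the patch; the credentials are placeholders:

```python
# Sketch only: the auth handshake the source performs. Fetch the Cognito
# app client id from Avni's idp-details endpoint, then run
# USER_PASSWORD_AUTH to obtain an IdToken for the "auth-token" header.
import boto3
import requests

def fetch_avni_token(username: str, password: str) -> str:
    idp = requests.get("https://app.avniproject.org/idp-details", timeout=30)
    idp.raise_for_status()
    client_id = idp.json()["cognito"]["clientId"]

    cognito = boto3.client("cognito-idp", region_name="ap-south-1")
    result = cognito.initiate_auth(
        ClientId=client_id,
        AuthFlow="USER_PASSWORD_AUTH",
        AuthParameters={"USERNAME": username, "PASSWORD": password},
    )
    # The IdToken is what the API expects in the "auth-token" header.
    return result["AuthenticationResult"]["IdToken"]

# headers = {"auth-token": fetch_avni_token("user", "secret")}  # placeholders
```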
+# + + +from airbyte_cdk.models import SyncMode +from pytest import fixture +from source_avni.source import AvniStream + + +@fixture +def patch_incremental_base_class(mocker): + + mocker.patch.object(AvniStream, "path", "v0/example_endpoint") + mocker.patch.object(AvniStream, "primary_key", "test_primary_key") + mocker.patch.object(AvniStream, "__abstractmethods__", set()) + + +def test_cursor_field(patch_incremental_base_class): + + stream = AvniStream(start_date="",auth_token="",path="") + expected_cursor_field = ["audit","Last modified at"] + assert stream.cursor_field == expected_cursor_field + + +def test_update_state(patch_incremental_base_class): + + stream = AvniStream(start_date="",auth_token="",path="") + stream.state = {"Last modified at":"OldDate"} + stream.last_record = {"audit": {"Last modified at":"NewDate"}} + expected_state = {"Last modified at":"NewDate"} + stream.update_state() + assert stream.state == expected_state + + +def test_stream_slices(patch_incremental_base_class): + + stream = AvniStream(start_date="",auth_token="",path="") + inputs = {"sync_mode": SyncMode.incremental, "cursor_field": [], "stream_state": {}} + expected_stream_slice = [None] + assert stream.stream_slices(**inputs) == expected_stream_slice + + +def test_supports_incremental(patch_incremental_base_class, mocker): + + mocker.patch.object(AvniStream, "cursor_field", "dummy_field") + stream = AvniStream(start_date="",auth_token="",path="") + assert stream.supports_incremental + + +def test_source_defined_cursor(patch_incremental_base_class): + + stream = AvniStream(start_date="",auth_token="",path="") + assert stream.source_defined_cursor + + +def test_stream_checkpoint_interval(patch_incremental_base_class): + + stream = AvniStream(start_date="",auth_token="",path="") + expected_checkpoint_interval = None + assert stream.state_checkpoint_interval == expected_checkpoint_interval diff --git a/airbyte-integrations/connectors/source-avni/unit_tests/test_source.py b/airbyte-integrations/connectors/source-avni/unit_tests/test_source.py new file mode 100644 index 000000000000..ef052ab9094c --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/unit_tests/test_source.py @@ -0,0 +1,38 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + +from unittest.mock import MagicMock + +from source_avni.source import SourceAvni + + +def test_check_connection(mocker): + + mocker.patch('source_avni.source.SourceAvni.get_token').return_value = "Token" + mocker.patch('source_avni.source.requests.get').return_value.status_code = 200 + source = SourceAvni() + logger_mock = MagicMock() + config_mock = {"username": "test_user", "password": "test_password","start_date": "date"} + result, error = source.check_connection(logger_mock, config_mock) + assert result is True + + +def test_streams(mocker): + + mocker.patch('source_avni.source.SourceAvni.generate_streams').return_value = ["a","b","c","d"] + source = SourceAvni() + config_mock = {"username": "test_user", "password": "test_password", "start_date": "2000-06-27T04:18:36.914Z"} + streams = source.streams(config_mock) + excepted_outcome = 4 + assert len(streams) == excepted_outcome + +def test_generate_streams(mocker): + + mocker.patch('source_avni.source.SourceAvni.get_token').return_value = "Token" + mocker.patch('source_avni.source.SourceAvni.get_client_id').return_value = "Token" + source = SourceAvni() + config_mock = {"username": "test_user", "password": "test_password", "start_date": "2000-06-27T04:18:36.914Z"} + streams = source.generate_streams(config_mock) + assert len(streams)==4 + diff --git a/airbyte-integrations/connectors/source-avni/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-avni/unit_tests/test_streams.py new file mode 100644 index 000000000000..2d709872ef9f --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/unit_tests/test_streams.py @@ -0,0 +1,97 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from source_avni.source import AvniStream + + +@pytest.fixture +def patch_base_class(mocker): + + mocker.patch.object(AvniStream, "path", "v0/example_endpoint") + mocker.patch.object(AvniStream, "primary_key", "test_primary_key") + mocker.patch.object(AvniStream, "__abstractmethods__", set()) + + +def test_request_params(mocker,patch_base_class): + + stream = AvniStream(start_date="",auth_token="",path="") + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": {"page":5}} + stream.state = {"Last modified at":"AnyDate"} + expected_params = {"lastModifiedDateTime":"AnyDate","page":5} + assert stream.request_params(**inputs) == expected_params + + +def test_next_page_token(patch_base_class): + + stream = AvniStream(start_date="",auth_token="",path="") + response_mock = MagicMock() + response_mock.json.return_value = { + "totalElements": 20, + "totalPages": 10, + "pageSize": 20 + } + + stream.current_page = 1 + inputs = {"response": response_mock} + expected_token = {"page": 2} + + assert stream.next_page_token(**inputs) == expected_token + assert stream.current_page == 2 + + +def test_parse_response(patch_base_class,mocker): + + stream = AvniStream(start_date="",auth_token="",path="") + response = MagicMock + response.content = b'{"content": [{"id": 1, "name": "John"}, {"id": 2, "name": "Jane"}]}' + + inputs = {"response": mocker.Mock(json=mocker.Mock(return_value={"content": [{"id": 1, "name": "Avni"}, {"id": 2, "name": "Airbyte"}]}))} + gen = stream.parse_response(**inputs) + assert next(gen) == {"id": 1, "name": "Avni"} + assert next(gen) == {"id": 2, "name": "Airbyte"} + + +def test_request_headers(patch_base_class): + + stream = AvniStream(start_date="",auth_token="",path="") + inputs = {"stream_slice": None, 
"stream_state": None, "next_page_token": None} + stream.auth_token = "Token" + expected_headers = {"auth-token":"Token"} + assert stream.request_headers(**inputs) == expected_headers + + +def test_http_method(patch_base_class): + + stream = AvniStream(start_date="",auth_token="",path="") + expected_method = "GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + + response_mock = MagicMock() + response_mock.status_code = http_status + stream = AvniStream(start_date="",auth_token="",path="") + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + + response_mock = MagicMock() + stream = AvniStream(start_date="",auth_token="",path="") + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/docs/integrations/sources/avni.md b/docs/integrations/sources/avni.md new file mode 100644 index 000000000000..526ad98eb376 --- /dev/null +++ b/docs/integrations/sources/avni.md @@ -0,0 +1,47 @@ +# Avni + +This page contains the setup guide and reference information for the Avni source connector. + +## Prerequisites + +- Username of Avni account +- Password of Avni account + +## Setup guide + +### Step 1: Set up an Avni account + +1. Signup on [Avni](https://avniproject.org/) to create an account. +2. Create Forms for Subjects Registrations, Programs Enrolment, Program Encounter using Avni Web Console -> [Getting Started](https://avniproject.org/getting-started/) +3. Register Subjects, Enrol them in Program using Avni Android Application [Here](https://play.google.com/store/apps/details?id=com.openchsclient&hl=en&gl=US) + +### Step 2: Set up the Avni connector in Airbyte + +**For Airbyte Open Source:** + +1. Go to local Airbyte page. +2. In the left navigation bar, click **Sources**. In the top-right corner, click **+ New Source**. +3. On the source setup page, select **Avni** from the Source type dropdown and enter a name for this connector. +4. Enter the **username** and **password** of your Avni account +5. Enter the **lastModifiedDateTime**, ALl the data which have been updated since this time will be returned. The Value should be specified in "yyyy-MM-dd'T'HH:mm:ss.SSSz", e.g. "2000-10-31T01:30:00.000Z". If all the data needed to be fetch keep this parameter to any old date or use e.g. date. +6. Click **Set up source**. + +## Supported sync modes + +The Avni source connector supports the following[ sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes): +​ + +- [Full Refresh - Overwrite](https://docs.airbyte.com/understanding-airbyte/connections/full-refresh-overwrite) +- [Full Refresh - Append](https://docs.airbyte.com/understanding-airbyte/connections/full-refresh-append) +- [Incremental Sync - Append](https://docs.airbyte.com/understanding-airbyte/connections/incremental-append) +- (Recommended)[ Incremental Sync - Deduped History](https://docs.airbyte.com/understanding-airbyte/connections/incremental-deduped-history) + + +## Supported Streams + +Avni Source connector Support Following Streams: + +- **Subjects Stream** : This stream provides details of registered subjects. You can retrieve information about subjects who have been registered in the system. 
+- **Program Enrolment Stream** : This stream provides program enrolment data. You can obtain information about subjects who have enrolled in programs. +- **Program Encounter Stream**, This stream provides data about encounters that occur within programs. You can retrieve information about all the encounters that have taken place within programs. +- **Subject Encounter Stream**, This stream provides data about encounters involving subjects, excluding program encounters. You can obtain information about all the encounters that subjects have had outside of program-encounter. From 1e03cc3108d3894250465d159167da1d8c765315 Mon Sep 17 00:00:00 2001 From: Aviraj Gour Date: Fri, 4 Aug 2023 09:59:06 +0530 Subject: [PATCH 09/38] Change to class implementation --- .../integration_tests/abnormal_state.json | 12 +- .../integration_tests/configured_catalog.json | 76 +++++++-- .../integration_tests/sample_state.json | 12 +- .../connectors/source-avni/metadata.yaml | 2 +- .../source_avni/schemas/encounters.json | 9 +- ...ncounters.json => program_encounters.json} | 9 +- ...nrolments.json => program_enrolments.json} | 6 +- .../source_avni/schemas/subjects.json | 10 +- .../source-avni/source_avni/source.py | 153 +++++++++++------- .../source-avni/source_avni/spec.yaml | 2 +- .../unit_tests/test_incremental_streams.py | 30 ++-- .../source-avni/unit_tests/test_source.py | 38 ++--- .../source-avni/unit_tests/test_streams.py | 16 +- docs/integrations/sources/avni.md | 5 + 14 files changed, 249 insertions(+), 131 deletions(-) rename airbyte-integrations/connectors/source-avni/source_avni/schemas/{programEncounters.json => program_encounters.json} (93%) rename airbyte-integrations/connectors/source-avni/source_avni/schemas/{programEnrolments.json => program_enrolments.json} (95%) diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-avni/integration_tests/abnormal_state.json index 25197515a9d0..31f5193c1612 100644 --- a/airbyte-integrations/connectors/source-avni/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-avni/integration_tests/abnormal_state.json @@ -1,14 +1,14 @@ { "subjects": { - "Last modified at":"2200-06-27T04:18:36.914Z" + "last_modified_at":"2200-06-27T04:18:36.914Z" }, - "programEnrolments": { - "Last modified at":"2200-06-27T04:18:36.914Z" + "program_enrolments": { + "last_modified_at":"2200-06-27T04:18:36.914Z" }, - "programEncounters": { - "Last modified at":"2200-06-27T04:18:36.914Z" + "program_encounters": { + "last_modified_at":"2200-06-27T04:18:36.914Z" }, "encounters": { - "Last modified at":"2200-06-27T04:18:36.914Z" + "last_modified_at":"2200-06-27T04:18:36.914Z" } } diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json index e7e42dcc322f..df39258c9271 100644 --- a/airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json @@ -3,57 +3,105 @@ { "stream": { "name": "subjects", - "json_schema": {}, + "json_schema": { + "properties": { + "username": { + "type": "string" + }, + "password": { + "type": "string" + }, + "lastModifiedDateTime":{ + "type": "string" + } + } + }, "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, - "default_cursor_field": ["Last modified at"], + "default_cursor_field": 
["last_modified_at"], "source_defined_primary_key": [["ID"]] }, "sync_mode": "incremental", "destination_sync_mode": "append_dedup", - "cursor_field": ["audit","Last modified at"], + "cursor_field": ["last_modified_at"], "primary_key": [["ID"]] }, { "stream": { - "name": "programEnrolments", - "json_schema": {}, + "name": "program_enrolments", + "json_schema": { + "properties": { + "username": { + "type": "string" + }, + "password": { + "type": "string" + }, + "lastModifiedDateTime":{ + "type": "string" + } + } + }, "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, - "default_cursor_field": ["Last modified at"], + "default_cursor_field": ["last_modified_at"], "source_defined_primary_key": [["ID"]] }, "sync_mode": "incremental", "destination_sync_mode": "append_dedup", - "cursor_field": ["audit","Last modified at"], + "cursor_field": ["last_modified_at"], "primary_key": [["ID"]] }, { "stream": { - "name": "programEncounters", - "json_schema": {}, + "name": "program_encounters", + "json_schema": { + "properties": { + "username": { + "type": "string" + }, + "password": { + "type": "string" + }, + "lastModifiedDateTime":{ + "type": "string" + } + } + }, "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, - "default_cursor_field": ["Last modified at"], + "default_cursor_field": ["last_modified_at"], "source_defined_primary_key": [["ID"]] }, "sync_mode": "incremental", "destination_sync_mode": "append_dedup", - "cursor_field": ["audit","Last modified at"], + "cursor_field": ["last_modified_at"], "primary_key": [["ID"]] }, { "stream": { "name": "encounters", - "json_schema": {}, + "json_schema": { + "properties": { + "username": { + "type": "string" + }, + "password": { + "type": "string" + }, + "lastModifiedDateTime":{ + "type": "string" + } + } + }, "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, - "default_cursor_field": ["Last modified at"], + "default_cursor_field": ["last_modified_at"], "source_defined_primary_key": [["ID"]] }, "sync_mode": "incremental", "destination_sync_mode": "append_dedup", - "cursor_field": ["audit","Last modified at"], + "cursor_field": ["last_modified_at"], "primary_key": [["ID"]] } ] diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-avni/integration_tests/sample_state.json index 7a8c5a6aa518..2a47e04cc4db 100644 --- a/airbyte-integrations/connectors/source-avni/integration_tests/sample_state.json +++ b/airbyte-integrations/connectors/source-avni/integration_tests/sample_state.json @@ -1,14 +1,14 @@ { "subjects": { - "Last modified at":"2000-06-27T04:18:36.914Z" + "last_modified_at":"2000-06-27T04:18:36.914Z" }, - "programEnrolments": { - "Last modified at":"2000-06-27T04:18:36.914Z" + "program_enrolments": { + "last_modified_at":"2000-06-27T04:18:36.914Z" }, - "programEncounters": { - "Last modified at":"2000-06-27T04:18:36.914Z" + "program_encounters": { + "last_modified_at":"2000-06-27T04:18:36.914Z" }, "encounters": { - "Last modified at":"2000-06-27T04:18:36.914Z" + "last_modified_at":"2000-06-27T04:18:36.914Z" } } diff --git a/airbyte-integrations/connectors/source-avni/metadata.yaml b/airbyte-integrations/connectors/source-avni/metadata.yaml index 8834761d1e3c..dbd0ed9f12fa 100644 --- a/airbyte-integrations/connectors/source-avni/metadata.yaml +++ b/airbyte-integrations/connectors/source-avni/metadata.yaml @@ -4,7 +4,7 @@ data: - "*" # Please change to the hostname of 
the source. registries: oss: - enabled: false + enabled: true connectorSubtype: api connectorType: source definitionId: a4adf548-9f40-4eb7-958f-9ff322abd481 diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/encounters.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/encounters.json index 92fed7759f76..c1727a9fc73b 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/schemas/encounters.json +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/encounters.json @@ -60,7 +60,8 @@ "type": ["null", "number"] }, "Y": { - "type": ["null", "number"] + "type": ["null", "number"], + "example": 74.7364501 } } }, @@ -72,6 +73,10 @@ "type": ["null", "object"], "additionalProperties": true }, + "last_modified_at":{ + "type": "string", + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, "audit": { "type": ["null", "object"], "additionalProperties": true, @@ -93,4 +98,4 @@ } } } -} +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/programEncounters.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/program_encounters.json similarity index 93% rename from airbyte-integrations/connectors/source-avni/source_avni/schemas/programEncounters.json rename to airbyte-integrations/connectors/source-avni/source_avni/schemas/program_encounters.json index 9ed1cecd0619..3e1a4a25acd9 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/schemas/programEncounters.json +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/program_encounters.json @@ -45,7 +45,8 @@ "type": ["null", "number"] }, "Y": { - "type": ["null", "number"] + "type": ["null", "number"], + "example": 74.7364501 } } }, @@ -82,6 +83,10 @@ "type": ["null", "object"], "additionalProperties": true }, + "last_modified_at":{ + "type": "string", + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, "audit": { "type": ["null", "object"], "properties": { @@ -102,4 +107,4 @@ } } } -} +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/programEnrolments.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/program_enrolments.json similarity index 95% rename from airbyte-integrations/connectors/source-avni/source_avni/schemas/programEnrolments.json rename to airbyte-integrations/connectors/source-avni/source_avni/schemas/program_enrolments.json index 9b1493442d8d..9d858d0b077d 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/schemas/programEnrolments.json +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/program_enrolments.json @@ -68,6 +68,10 @@ "type": ["null", "string"] } }, + "last_modified_at":{ + "type": "string", + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, "audit": { "type": ["null", "object"], "additionalProperties": true, @@ -89,4 +93,4 @@ } } } -} +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json index a6f3c2374231..8761f2514761 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json @@ -23,7 +23,8 @@ "type": ["null", "number"] }, "Y": { - "type": ["null", "number"] + "type": ["null", "number"], + "example": 74.7364501 } } }, @@ -79,6 +80,11 @@ "format": "uuid" } }, + "last_modified_at":{ + "type": "string", + 
"format": "YYYY-MM-DDTHH:mm:ss.sssZ" + } + , "audit": { "type": ["null", "object"], "additionalProperties": true, @@ -106,4 +112,4 @@ } } } -} +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-avni/source_avni/source.py b/airbyte-integrations/connectors/source-avni/source_avni/source.py index e42430e6f174..e12d819d2c8c 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/source.py +++ b/airbyte-integrations/connectors/source-avni/source_avni/source.py @@ -7,53 +7,36 @@ import boto3 import requests +from airbyte_cdk.models import SyncMode from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import IncrementalMixin, Stream +from airbyte_cdk.sources.streams.core import StreamData from airbyte_cdk.sources.streams.http import HttpStream -from airbyte_cdk.models import SyncMode -class Avni(HttpStream, ABC): +class AvniStream(HttpStream, ABC): url_base = "https://app.avniproject.org/api/" primary_key = "ID" cursor_value = None current_page = 0 last_record = None - - def __init__(self, start_date: str, path , auth_token: str, **kwargs): + + def __init__(self, start_date: str, auth_token: str, **kwargs): super().__init__(**kwargs) self.start_date = start_date self.auth_token = auth_token - self.stream=path - -class AvniStream(Avni,IncrementalMixin): - - """ - - This implement diffrent Stream in Source Avni - - Api docs : https://avni.readme.io/docs/api-guide - Api endpoints : https://app.swaggerhub.com/apis-docs/samanvay/avni-external/1.0.0 - """ - def path(self, **kwargs) -> str: - return self.stream - def request_params( self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> MutableMapping[str, Any]: - params = {"lastModifiedDateTime": self.state["Last modified at"]} + params = {"lastModifiedDateTime": self.state["last_modified_at"]} if next_page_token: params.update(next_page_token) return params - - @property - def name(self) -> str: - return self.stream - + def request_headers( self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None ) -> Mapping[str, Any]: @@ -68,13 +51,28 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp yield from data + def read_records( + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ) -> Iterable[StreamData]: + + records = super().read_records(sync_mode, cursor_field, stream_slice, stream_state) + for record in records: + last_modified_at = record["audit"]["Last modified at"] + record["last_modified_at"] = last_modified_at + yield record + def update_state(self) -> None: if self.last_record: updated_last_date = self.last_record["audit"]["Last modified at"] - if updated_last_date>self.state[self.cursor_field[1]]: - self.state = {self.cursor_field[1]: updated_last_date} + if updated_last_date > self.state["last_modified_at"]: + self.state = {self.cursor_field: updated_last_date} self.last_record = None + return None def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: @@ -92,29 +90,78 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, return None + +class IncrementalAvniStream(AvniStream, IncrementalMixin, ABC): + state_checkpoint_interval = None @property - def cursor_field(self) -> List[str]: - return ["audit", "Last modified at"] + def cursor_field(self) -> str: + 
return "last_modified_at" @property def state(self) -> Mapping[str, Any]: if self.cursor_value: - return {self.cursor_field[1]: self.cursor_value} + return {self.cursor_field: self.cursor_value} else: - return {self.cursor_field[1]: self.start_date} + return {self.cursor_field: self.start_date} @state.setter def state(self, value: Mapping[str, Any]): - self.cursor_value = value[self.cursor_field[1]] + self.cursor_value = value[self.cursor_field] self._state = value -class SourceAvni(AbstractSource): - +class Subjects(IncrementalAvniStream): + + """ + This implement Subject Stream in Source Avni + Api docs : https://avni.readme.io/docs/api-guide + Api endpoints : https://app.swaggerhub.com/apis-docs/samanvay/avni-external/1.0.0 + """ + + def path(self, **kwargs) -> str: + return "subjects" + + +class ProgramEnrolments(IncrementalAvniStream): + + """ + This implement ProgramEnrolments Stream in Source Avni + Api docs : https://avni.readme.io/docs/api-guide + Api endpoints : https://app.swaggerhub.com/apis-docs/samanvay/avni-external/1.0.0 + """ + def path(self, **kwargs) -> str: + return "programEnrolments" + + +class ProgramEncounters(IncrementalAvniStream): + + """ + This implement ProgramEncounters Stream in Source Avni + Api docs : https://avni.readme.io/docs/api-guide + Api endpoints : https://app.swaggerhub.com/apis-docs/samanvay/avni-external/1.0.0 + """ + + def path(self, **kwargs) -> str: + return "programEncounters" + + +class Encounters(IncrementalAvniStream): + + """ + This implement Encounters Stream in Source Avni + Api docs : https://avni.readme.io/docs/api-guide + Api endpoints : https://app.swaggerhub.com/apis-docs/samanvay/avni-external/1.0.0 + """ + + def path(self, **kwargs) -> str: + return "encounters" + + +class SourceAvni(AbstractSource): def get_client_id(self): url_client = "https://app.avniproject.org/idp-details" @@ -125,15 +172,19 @@ def get_client_id(self): def get_token(self, username: str, password: str, app_client_id: str) -> str: + """ + Avni Api Authentication : https://avni.readme.io/docs/api-guide#authentication + AWS Cognito for authentication : https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/cognito-idp/client/initiate_auth.html + """ + client = boto3.client("cognito-idp", region_name="ap-south-1") response = client.initiate_auth( ClientId=app_client_id, AuthFlow="USER_PASSWORD_AUTH", AuthParameters={"USERNAME": username, "PASSWORD": password} ) return response["AuthenticationResult"]["IdToken"] - - + def check_connection(self, logger, config) -> Tuple[bool, any]: - + username = config["username"] password = config["password"] @@ -143,18 +194,16 @@ def check_connection(self, logger, config) -> Tuple[bool, any]: return False, str(error) + ": Please connect With Avni Team" try: - auth_token = self.get_token(username, password, client_id) stream_kwargs = {"auth_token": auth_token, "start_date": config["start_date"]} - stream = AvniStream(path="subjects",**stream_kwargs).read_records(SyncMode.full_refresh) + next(Subjects(**stream_kwargs).read_records(SyncMode.full_refresh)) return True, None - + except Exception as error: return False, error - - def generate_streams(self, config: str) -> List[Stream]: - - streams = [] + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + username = config["username"] password = config["password"] @@ -165,16 +214,12 @@ def generate_streams(self, config: str) -> List[Stream]: raise error auth_token = self.get_token(username, password, client_id) - - endpoints 
=["subjects","programEnrolments","programEncounters","encounters"] - for endpoint in endpoints: - stream_kwargs = {"auth_token": auth_token, "start_date": config["start_date"]} - stream=AvniStream(path=endpoint,**stream_kwargs) - streams.append(stream) - return streams - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: + stream_kwargs = {"auth_token": auth_token, "start_date": config["start_date"]} - streams = self.generate_streams(config=config) - return streams \ No newline at end of file + return [ + Subjects(**stream_kwargs), + ProgramEnrolments(**stream_kwargs), + ProgramEncounters(**stream_kwargs), + Encounters(**stream_kwargs), + ] diff --git a/airbyte-integrations/connectors/source-avni/source_avni/spec.yaml b/airbyte-integrations/connectors/source-avni/source_avni/spec.yaml index 573ff4d87f35..b010d2cf1584 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/spec.yaml +++ b/airbyte-integrations/connectors/source-avni/source_avni/spec.yaml @@ -1,4 +1,4 @@ -documentationUrl: https://docsurl.com +documentationUrl: https://docs.airbyte.com/integrations/sources/avni connectionSpecification: $schema: http://json-schema.org/draft-07/schema# title: Avni Spec diff --git a/airbyte-integrations/connectors/source-avni/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-avni/unit_tests/test_incremental_streams.py index ce76621900b2..ccd09b924011 100644 --- a/airbyte-integrations/connectors/source-avni/unit_tests/test_incremental_streams.py +++ b/airbyte-integrations/connectors/source-avni/unit_tests/test_incremental_streams.py @@ -5,37 +5,37 @@ from airbyte_cdk.models import SyncMode from pytest import fixture -from source_avni.source import AvniStream +from source_avni.source import IncrementalAvniStream @fixture def patch_incremental_base_class(mocker): - mocker.patch.object(AvniStream, "path", "v0/example_endpoint") - mocker.patch.object(AvniStream, "primary_key", "test_primary_key") - mocker.patch.object(AvniStream, "__abstractmethods__", set()) + mocker.patch.object(IncrementalAvniStream, "path", "v0/example_endpoint") + mocker.patch.object(IncrementalAvniStream, "primary_key", "test_primary_key") + mocker.patch.object(IncrementalAvniStream, "__abstractmethods__", set()) def test_cursor_field(patch_incremental_base_class): - stream = AvniStream(start_date="",auth_token="",path="") - expected_cursor_field = ["audit","Last modified at"] + stream = IncrementalAvniStream(start_date="",auth_token="") + expected_cursor_field = "last_modified_at" assert stream.cursor_field == expected_cursor_field def test_update_state(patch_incremental_base_class): - stream = AvniStream(start_date="",auth_token="",path="") - stream.state = {"Last modified at":"OldDate"} - stream.last_record = {"audit": {"Last modified at":"NewDate"}} - expected_state = {"Last modified at":"NewDate"} + stream = IncrementalAvniStream(start_date="",auth_token="") + stream.state = {"last_modified_at":"2000-06-27T04:18:36.914Z"} + stream.last_record = {"audit":{"Last modified at":"2001-06-27T04:18:36.914Z"}} + expected_state = {"last_modified_at":"2001-06-27T04:18:36.914Z"} stream.update_state() assert stream.state == expected_state def test_stream_slices(patch_incremental_base_class): - stream = AvniStream(start_date="",auth_token="",path="") + stream = IncrementalAvniStream(start_date="",auth_token="") inputs = {"sync_mode": SyncMode.incremental, "cursor_field": [], "stream_state": {}} expected_stream_slice = [None] assert stream.stream_slices(**inputs) == 
expected_stream_slice @@ -43,19 +43,19 @@ def test_stream_slices(patch_incremental_base_class): def test_supports_incremental(patch_incremental_base_class, mocker): - mocker.patch.object(AvniStream, "cursor_field", "dummy_field") - stream = AvniStream(start_date="",auth_token="",path="") + mocker.patch.object(IncrementalAvniStream, "cursor_field", "dummy_field") + stream = IncrementalAvniStream(start_date="",auth_token="") assert stream.supports_incremental def test_source_defined_cursor(patch_incremental_base_class): - stream = AvniStream(start_date="",auth_token="",path="") + stream = IncrementalAvniStream(start_date="",auth_token="") assert stream.source_defined_cursor def test_stream_checkpoint_interval(patch_incremental_base_class): - stream = AvniStream(start_date="",auth_token="",path="") + stream = IncrementalAvniStream(start_date="",auth_token="") expected_checkpoint_interval = None assert stream.state_checkpoint_interval == expected_checkpoint_interval diff --git a/airbyte-integrations/connectors/source-avni/unit_tests/test_source.py b/airbyte-integrations/connectors/source-avni/unit_tests/test_source.py index ef052ab9094c..7022f4de4ce4 100644 --- a/airbyte-integrations/connectors/source-avni/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-avni/unit_tests/test_source.py @@ -2,37 +2,37 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from unittest.mock import MagicMock +from unittest.mock import patch from source_avni.source import SourceAvni -def test_check_connection(mocker): - - mocker.patch('source_avni.source.SourceAvni.get_token').return_value = "Token" - mocker.patch('source_avni.source.requests.get').return_value.status_code = 200 - source = SourceAvni() - logger_mock = MagicMock() - config_mock = {"username": "test_user", "password": "test_password","start_date": "date"} - result, error = source.check_connection(logger_mock, config_mock) - assert result is True +def test_check_connection_success(mocker): + with patch('source_avni.source.SourceAvni.get_client_id') as get_client_id_mock, \ + patch('source_avni.source.SourceAvni.get_token') as get_token_mock, \ + patch('source_avni.source.Subjects.read_records') as read_records_mock: + get_client_id_mock.return_value = "ClientID" + get_token_mock.return_value = "Token" + read_records_mock.return_value = iter(["record1", "record2"]) + source = SourceAvni() + config_mock = {"username": "test_user", "password": "test_password", "start_date": "2000-06-27T04:18:36.914Z"} + result,msg = source.check_connection(None, config_mock) + assert result is True def test_streams(mocker): - mocker.patch('source_avni.source.SourceAvni.generate_streams').return_value = ["a","b","c","d"] + mocker.patch('source_avni.source.SourceAvni.get_token').return_value = 'fake_token' source = SourceAvni() config_mock = {"username": "test_user", "password": "test_password", "start_date": "2000-06-27T04:18:36.914Z"} streams = source.streams(config_mock) excepted_outcome = 4 assert len(streams) == excepted_outcome -def test_generate_streams(mocker): - - mocker.patch('source_avni.source.SourceAvni.get_token').return_value = "Token" - mocker.patch('source_avni.source.SourceAvni.get_client_id').return_value = "Token" + +def test_get_client_id(mocker): + source = SourceAvni() - config_mock = {"username": "test_user", "password": "test_password", "start_date": "2000-06-27T04:18:36.914Z"} - streams = source.generate_streams(config_mock) - assert len(streams)==4 - + client_id = source.get_client_id() + expected_length = 26 + 
assert len(client_id) == expected_length diff --git a/airbyte-integrations/connectors/source-avni/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-avni/unit_tests/test_streams.py index 2d709872ef9f..c0be8d8359d4 100644 --- a/airbyte-integrations/connectors/source-avni/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-avni/unit_tests/test_streams.py @@ -19,16 +19,16 @@ def patch_base_class(mocker): def test_request_params(mocker,patch_base_class): - stream = AvniStream(start_date="",auth_token="",path="") + stream = AvniStream(start_date="",auth_token="") inputs = {"stream_slice": None, "stream_state": None, "next_page_token": {"page":5}} - stream.state = {"Last modified at":"AnyDate"} + stream.state = {"last_modified_at":"AnyDate"} expected_params = {"lastModifiedDateTime":"AnyDate","page":5} assert stream.request_params(**inputs) == expected_params def test_next_page_token(patch_base_class): - stream = AvniStream(start_date="",auth_token="",path="") + stream = AvniStream(start_date="",auth_token="") response_mock = MagicMock() response_mock.json.return_value = { "totalElements": 20, @@ -46,7 +46,7 @@ def test_next_page_token(patch_base_class): def test_parse_response(patch_base_class,mocker): - stream = AvniStream(start_date="",auth_token="",path="") + stream = AvniStream(start_date="",auth_token="") response = MagicMock response.content = b'{"content": [{"id": 1, "name": "John"}, {"id": 2, "name": "Jane"}]}' @@ -58,7 +58,7 @@ def test_parse_response(patch_base_class,mocker): def test_request_headers(patch_base_class): - stream = AvniStream(start_date="",auth_token="",path="") + stream = AvniStream(start_date="",auth_token="") inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} stream.auth_token = "Token" expected_headers = {"auth-token":"Token"} @@ -67,7 +67,7 @@ def test_request_headers(patch_base_class): def test_http_method(patch_base_class): - stream = AvniStream(start_date="",auth_token="",path="") + stream = AvniStream(start_date="",auth_token="") expected_method = "GET" assert stream.http_method == expected_method @@ -85,13 +85,13 @@ def test_should_retry(patch_base_class, http_status, should_retry): response_mock = MagicMock() response_mock.status_code = http_status - stream = AvniStream(start_date="",auth_token="",path="") + stream = AvniStream(start_date="",auth_token="") assert stream.should_retry(response_mock) == should_retry def test_backoff_time(patch_base_class): response_mock = MagicMock() - stream = AvniStream(start_date="",auth_token="",path="") + stream = AvniStream(start_date="",auth_token="") expected_backoff_time = None assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/docs/integrations/sources/avni.md b/docs/integrations/sources/avni.md index 526ad98eb376..8ff39909c6bf 100644 --- a/docs/integrations/sources/avni.md +++ b/docs/integrations/sources/avni.md @@ -45,3 +45,8 @@ Avni Source connector Support Following Streams: - **Program Enrolment Stream** : This stream provides program enrolment data. You can obtain information about subjects who have enrolled in programs. - **Program Encounter Stream**, This stream provides data about encounters that occur within programs. You can retrieve information about all the encounters that have taken place within programs. - **Subject Encounter Stream**, This stream provides data about encounters involving subjects, excluding program encounters. 
You can obtain information about all the encounters that subjects have had outside of program-encounter.
+
+## Changelog
+
+| Version | Date | Pull Request | Subject |
+| :------ | :--- | :----------- | :------ |
+| 0.1.0 | 2023-07-27 | [28141](https://github.com/airbytehq/airbyte/pull/28141) | Avni Source Connector |

From 793f70053f7936e7a7914d9b588cb0185cc8f221 Mon Sep 17 00:00:00 2001
From: Rohit Chatterjee
Date: Thu, 10 Aug 2023 21:09:57 +0530
Subject: [PATCH 10/38] Added title to form properties

---
 .../connectors/source-avni/source_avni/spec.yaml | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/airbyte-integrations/connectors/source-avni/source_avni/spec.yaml b/airbyte-integrations/connectors/source-avni/source_avni/spec.yaml
index b010d2cf1584..2ecbcd34dae2 100644
--- a/airbyte-integrations/connectors/source-avni/source_avni/spec.yaml
+++ b/airbyte-integrations/connectors/source-avni/source_avni/spec.yaml
@@ -10,14 +10,17 @@
   properties:
     username:
       type: string
-      description: Your avni platform Username
+      description: Your Avni platform username
+      title: Username
     password:
      type: string
-      description: Your avni platform password
+      description: Your Avni platform password
+      title: Password
       airbyte_secret: true
     start_date:
       type: string
       default: "2000-06-23T01:30:00.000Z"
       description: Specify the date and time from which you want to fetch data
+      title: Start date and time
       examples:
         - "2000-10-31T01:30:00.000Z"
\ No newline at end of file

From ded1329a358f6172dde9a7334a84b1f88afa877f Mon Sep 17 00:00:00 2001
From: Aviraj Gour
Date: Mon, 21 Aug 2023 23:13:44 +0530
Subject: [PATCH 11/38] incremental sync added

---
 .../integration_tests/configured_catalog.json | 239 ++++++++----------
 .../source-glific/source_glific/source.py     | 102 +++++---
 2 files changed, 173 insertions(+), 168 deletions(-)

diff --git a/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json
index 1831d0ecb926..a0252d4a95c4 100644
--- a/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json
+++ b/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json
@@ -1,191 +1,166 @@
 {
   "streams": [
     {
-      "cursor_field": null,
-      "destination_sync_mode": "append",
-      "primary_key": null,
       "stream": {
-        "default_cursor_field": null,
-        "json_schema": {},
         "name": "contacts",
-        "namespace": null,
-        "source_defined_cursor": null,
-        "source_defined_primary_key": null,
-        "supported_sync_modes": [
-          "full_refresh"
-        ]
+        "json_schema": {
+        },
+        "supported_sync_modes": ["full_refresh","incremental"],
+        "source_defined_cursor": true,
+        "default_cursor_field": ["updated_at"],
+        "source_defined_primary_key": null
       },
-      "sync_mode": "full_refresh"
+      "sync_mode": "incremental",
+      "destination_sync_mode": "append",
+      "cursor_field": ["updated_at"],
+      "primary_key": null
     },
     {
-      "cursor_field": null,
-      "destination_sync_mode": "append",
-      "primary_key": null,
       "stream": {
-        "default_cursor_field": null,
-        "json_schema": {},
         "name": "messages",
-        "namespace": null,
-        "source_defined_cursor": null,
-        "source_defined_primary_key": null,
-        "supported_sync_modes": [
-          "full_refresh"
-        ]
+        "json_schema": {
+        },
+        "supported_sync_modes": ["full_refresh","incremental"],
+        "source_defined_cursor": true,
+        "default_cursor_field": ["updated_at"],
+        "source_defined_primary_key": null
      },
-      "sync_mode": "full_refresh"
+      "sync_mode": "incremental",
+      "destination_sync_mode": "append",
+
"cursor_field": ["updated_at"], + "primary_key": null }, { - "cursor_field": null, - "destination_sync_mode": "append", - "primary_key": null, "stream": { - "default_cursor_field": null, - "json_schema": {}, "name": "messages_media", - "namespace": null, - "source_defined_cursor": null, - "source_defined_primary_key": null, - "supported_sync_modes": [ - "full_refresh" - ] + "json_schema": { + }, + "supported_sync_modes": ["full_refresh","incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": null }, - "sync_mode": "full_refresh" + "sync_mode": "incremental", + "destination_sync_mode": "append", + "cursor_field": ["updated_at"], + "primary_key": null }, { - "cursor_field": null, - "destination_sync_mode": "append", - "primary_key": null, "stream": { - "default_cursor_field": null, - "json_schema": {}, "name": "locations", - "namespace": null, - "source_defined_cursor": null, - "source_defined_primary_key": null, - "supported_sync_modes": [ - "full_refresh" - ] + "json_schema": { + }, + "supported_sync_modes": ["full_refresh","incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": null }, - "sync_mode": "full_refresh" + "sync_mode": "incremental", + "destination_sync_mode": "append", + "cursor_field": ["updated_at"], + "primary_key": null }, { - "cursor_field": null, - "destination_sync_mode": "append", - "primary_key": null, "stream": { - "default_cursor_field": null, - "json_schema": {}, "name": "flows", - "namespace": null, - "source_defined_cursor": null, - "source_defined_primary_key": null, - "supported_sync_modes": [ - "full_refresh" - ] + "json_schema": { + }, + "supported_sync_modes": ["full_refresh","incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": null }, - "sync_mode": "full_refresh" + "sync_mode": "incremental", + "destination_sync_mode": "append", + "cursor_field": ["updated_at"], + "primary_key": null }, { - "cursor_field": null, - "destination_sync_mode": "append", - "primary_key": null, "stream": { - "default_cursor_field": null, - "json_schema": {}, "name": "flow_results", - "namespace": null, - "source_defined_cursor": null, - "source_defined_primary_key": null, - "supported_sync_modes": [ - "full_refresh" - ] + "json_schema": { + }, + "supported_sync_modes": ["full_refresh","incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": null }, - "sync_mode": "full_refresh" + "sync_mode": "incremental", + "destination_sync_mode": "append", + "cursor_field": ["updated_at"], + "primary_key": null }, { - "cursor_field": null, - "destination_sync_mode": "append", - "primary_key": null, "stream": { - "default_cursor_field": null, - "json_schema": {}, "name": "groups", - "namespace": null, - "source_defined_cursor": null, - "source_defined_primary_key": null, - "supported_sync_modes": [ - "full_refresh" - ] + "json_schema": { + }, + "supported_sync_modes": ["full_refresh","incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": null }, - "sync_mode": "full_refresh" + "sync_mode": "incremental", + "destination_sync_mode": "append", + "cursor_field": ["updated_at"], + "primary_key": null }, { - "cursor_field": null, - "destination_sync_mode": "append", - "primary_key": null, "stream": { - "default_cursor_field": null, - "json_schema": {}, "name": 
"interactive_templates", - "namespace": null, - "source_defined_cursor": null, - "source_defined_primary_key": null, - "supported_sync_modes": [ - "full_refresh" - ] + "json_schema": { + }, + "supported_sync_modes": ["full_refresh","incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": null }, - "sync_mode": "full_refresh" + "sync_mode": "incremental", + "destination_sync_mode": "append", + "cursor_field": ["updated_at"], + "primary_key": null }, { - "cursor_field": null, - "destination_sync_mode": "append", - "primary_key": null, "stream": { - "default_cursor_field": null, - "json_schema": {}, "name": "organizations", - "namespace": null, - "source_defined_cursor": null, - "source_defined_primary_key": null, - "supported_sync_modes": [ - "full_refresh" - ] + "json_schema": { + }, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": null }, - "sync_mode": "full_refresh" + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + "primary_key": null }, { - "cursor_field": null, - "destination_sync_mode": "append", - "primary_key": null, "stream": { - "default_cursor_field": null, - "json_schema": {}, "name": "organization_data", - "namespace": null, - "source_defined_cursor": null, - "source_defined_primary_key": null, - "supported_sync_modes": [ - "full_refresh" - ] + "json_schema": { + }, + "supported_sync_modes": ["full_refresh","incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": null }, - "sync_mode": "full_refresh" + "sync_mode": "incremental", + "destination_sync_mode": "append", + "cursor_field": ["updated_at"], + "primary_key": null }, { - "cursor_field": null, - "destination_sync_mode": "append", - "primary_key": null, "stream": { - "default_cursor_field": null, - "json_schema": {}, "name": "profiles", - "namespace": null, - "source_defined_cursor": null, - "source_defined_primary_key": null, - "supported_sync_modes": [ - "full_refresh" - ] + "json_schema": { + }, + "supported_sync_modes": ["full_refresh","incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated_at"], + "source_defined_primary_key": null }, - "sync_mode": "full_refresh" + "sync_mode": "incremental", + "destination_sync_mode": "append", + "cursor_field": ["updated_at"], + "primary_key": null } ] } \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-glific/source_glific/source.py b/airbyte-integrations/connectors/source-glific/source_glific/source.py index 543624f42412..4b6dd9a36972 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/source.py +++ b/airbyte-integrations/connectors/source-glific/source_glific/source.py @@ -5,13 +5,16 @@ from abc import ABC from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Union +from airbyte_cdk.models import SyncMode from airbyte_cdk.sources.streams.http.auth.core import HttpAuthenticator import requests import json from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams import Stream, IncrementalMixin from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.sources.streams.core import StreamData +from datetime import datetime @@ -32,6 +35,8 @@ class GlificStream(HttpStream, ABC): primary_key = None + cursor_value = None + latest_updated_date = None """ This class represents a stream output by the connector. 
@@ -66,6 +71,7 @@ def __init__(self, stream_name: str, url_base: str, pagination_limit: int, crede self.pagination_limit = pagination_limit self.start_time = config['start_time'] self.offset = 0 + self.last_record = None @property def url_base(self) -> str: @@ -86,23 +92,18 @@ def get_json_schema(self) -> dict: def path(self, *, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> str: return "" + + def update_state(self) -> None: + + if self.latest_updated_date: + if self.latest_updated_date> self.state['updated_at']: + self.state = {self.cursor_field: self.latest_updated_date} + self.latest_updated_date = None + return None def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - """ - Override this method to define a pagination strategy. If you will not be using pagination, no action is required - just return None. - - This method should return a Mapping (e.g: dict) containing whatever information required to make paginated requests. This dict is passed - to most other methods in this class to help you form headers, request bodies, query params, etc.. - - For example, if the API accepts a 'page' parameter to determine which page of the result to return, and a response from the API contains a - 'page' number, then this method should probably return a dict {'page': response.json()['page'] + 1} to increment the page count by 1. - The request_params method should then read the input next_page_token and set the 'page' param to next_page_token['page']. - - :param response: the most recent response from the API - :return If there is another page in the result, a mapping (e.g: dict) containing information needed to query the next page in the response. - If there are no more pages in the result, return None. - """ - + + json_resp = response.json() if json_resp['data']['organizationExportData'] is not None: records_str = json_resp['data']['organizationExportData']['data'] @@ -114,20 +115,21 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, self.offset += 1 return {"offset": self.offset, "limit": self.pagination_limit} + self.update_state() + return None def request_headers(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> Mapping[str, Any]: - """Add the authorization token in the headers""" + return {'authorization': self.credentials['access_token'], 'Content-Type': 'application/json'} - + def request_body_json(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> Mapping: - """Request body to post""" query = "query organizationExportData($filter: ExportFilter) { organizationExportData(filter: $filter) {data errors { key message } } }" - + filter_obj = { - "startTime": self.start_time, + "startTime": self.state['updated_at'], "offset": self.offset, "limit": self.pagination_limit, "tables": [self.stream_name] @@ -140,24 +142,52 @@ def request_body_json(self, stream_state: Mapping[str, Any], stream_slice: Mappi return {"query": query, "variables": {"filter": filter_obj}} def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - """ - Override this method to define how a response is parsed. 
- :return an iterable containing each record in the response - """ + json_resp = response.json() if json_resp['data']['organizationExportData'] is not None: records_str = json_resp['data']['organizationExportData']['data'] records_obj = json.loads(records_str) - if self.stream_name in records_obj['data']: - records = json.loads(records_str)['data'][f'{self.stream_name}'] - col_names = records[0].split(',') - print("NOOO OFFF COOLLLSS", len(col_names)) - for i in range(1, len(records)): # each record - record = {} - print("RECORD NOO OOFFF VALLLSS",i, len(records[i].split(','))) - for j, col_val in enumerate(records[i].split(',')): # each col_val - record[col_names[j]] = col_val - yield record + yield from records_obj['data'][self.stream_name] + + + def read_records(self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_slice: Mapping[str, Any] = None, stream_state: Mapping[str, Any] = None,) -> Iterable[StreamData]: + records = super().read_records(sync_mode, cursor_field, stream_slice, stream_state) + + for record in records: + if(len(record['updated_at'])==19): + record['updated_at'] = record['updated_at'] + 'Z' + else: + record['updated_at'] = datetime.strptime(record['updated_at'], "%Y-%m-%dT%H:%M:%S.%f").strftime("%Y-%m-%dT%H:%M:%SZ") + + if self.latest_updated_date: + if record['updated_at']>self.latest_updated_date: + self.latest_updated_date = record['updated_at'] + else: + self.latest_updated_date = record['updated_at'] + yield record + + +class IncrementalGlificStream(GlificStream, IncrementalMixin, ABC): + + state_checkpoint_interval = None + + @property + def cursor_field(self) -> str: + return "updated_at" + + @property + def state(self) -> Mapping[str, Any]: + + if self.cursor_value: + return {self.cursor_field: self.cursor_value} + else: + return {self.cursor_field: self.start_time} + + @state.setter + def state(self, value: Mapping[str, Any]): + self.cursor_value = value[self.cursor_field] + self._state = value + # Source class SourceGlific(AbstractSource): @@ -244,7 +274,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: export_config = json.loads(data['data']['organizationExportConfig']['data']) streams = [] for table in export_config['tables']: - stream_obj = GlificStream(table, self.API_URL, self.PAGINATION_LIMIT, credentials, config) + stream_obj = IncrementalGlificStream(table, self.API_URL, self.PAGINATION_LIMIT, credentials, config) streams.append(stream_obj) return streams From 48c10a5cfec33f78843d3e809e3829cdc5478174 Mon Sep 17 00:00:00 2001 From: Aviraj Gour Date: Tue, 29 Aug 2023 18:25:34 +0530 Subject: [PATCH 12/38] primary key --- .../integration_tests/configured_catalog.json | 44 +++++++++---------- .../source-glific/source_glific/source.py | 2 +- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json index a0252d4a95c4..252f6fb62a93 100644 --- a/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-glific/integration_tests/configured_catalog.json @@ -8,12 +8,12 @@ "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, "default_cursor_field": ["updated_at"], - "source_defined_primary_key": null + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "destination_sync_mode": "append", "cursor_field": ["updated_at"], - 
"primary_key": null + "primary_key": [["id"]] }, { "stream": { @@ -23,12 +23,12 @@ "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, "default_cursor_field": ["updated_at"], - "source_defined_primary_key": null + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "destination_sync_mode": "append", "cursor_field": ["updated_at"], - "primary_key": null + "primary_key": [["id"]] }, { "stream": { @@ -38,12 +38,12 @@ "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, "default_cursor_field": ["updated_at"], - "source_defined_primary_key": null + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "destination_sync_mode": "append", "cursor_field": ["updated_at"], - "primary_key": null + "primary_key": [["id"]] }, { "stream": { @@ -53,12 +53,12 @@ "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, "default_cursor_field": ["updated_at"], - "source_defined_primary_key": null + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "destination_sync_mode": "append", "cursor_field": ["updated_at"], - "primary_key": null + "primary_key": [["id"]] }, { "stream": { @@ -68,12 +68,12 @@ "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, "default_cursor_field": ["updated_at"], - "source_defined_primary_key": null + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "destination_sync_mode": "append", "cursor_field": ["updated_at"], - "primary_key": null + "primary_key": [["id"]] }, { "stream": { @@ -83,12 +83,12 @@ "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, "default_cursor_field": ["updated_at"], - "source_defined_primary_key": null + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "destination_sync_mode": "append", "cursor_field": ["updated_at"], - "primary_key": null + "primary_key": [["id"]] }, { "stream": { @@ -98,12 +98,12 @@ "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, "default_cursor_field": ["updated_at"], - "source_defined_primary_key": null + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "destination_sync_mode": "append", "cursor_field": ["updated_at"], - "primary_key": null + "primary_key": [["id"]] }, { "stream": { @@ -113,12 +113,12 @@ "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, "default_cursor_field": ["updated_at"], - "source_defined_primary_key": null + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "destination_sync_mode": "append", "cursor_field": ["updated_at"], - "primary_key": null + "primary_key": [["id"]] }, { "stream": { @@ -126,11 +126,11 @@ "json_schema": { }, "supported_sync_modes": ["full_refresh"], - "source_defined_primary_key": null + "source_defined_primary_key": [["id"]] }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite", - "primary_key": null + "primary_key": [["id"]] }, { "stream": { @@ -140,12 +140,12 @@ "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, "default_cursor_field": ["updated_at"], - "source_defined_primary_key": null + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "destination_sync_mode": "append", "cursor_field": ["updated_at"], - "primary_key": null + "primary_key": [["id"]] }, { "stream": { @@ -155,12 +155,12 @@ "supported_sync_modes": ["full_refresh","incremental"], 
"source_defined_cursor": true, "default_cursor_field": ["updated_at"], - "source_defined_primary_key": null + "source_defined_primary_key": [["id"]] }, "sync_mode": "incremental", "destination_sync_mode": "append", "cursor_field": ["updated_at"], - "primary_key": null + "primary_key": [["id"]] } ] } \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-glific/source_glific/source.py b/airbyte-integrations/connectors/source-glific/source_glific/source.py index 4b6dd9a36972..e58fa47ca6b6 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/source.py +++ b/airbyte-integrations/connectors/source-glific/source_glific/source.py @@ -34,7 +34,7 @@ # Basic full refresh stream class GlificStream(HttpStream, ABC): - primary_key = None + primary_key = "id" cursor_value = None latest_updated_date = None From 8e68f67a41d21a292b766e7d1277a8bdd46f8877 Mon Sep 17 00:00:00 2001 From: Aviraj Gour Date: Thu, 7 Sep 2023 14:21:00 +0530 Subject: [PATCH 13/38] migrated to lowcode --- .../connectors/source-avni/Dockerfile | 2 +- .../connectors/source-avni/README.md | 64 +---- .../source-avni/{unit_tests => }/__init__.py | 0 .../source-avni/acceptance-test-config.yml | 46 ++-- .../integration_tests/configured_catalog.json | 48 +--- .../connectors/source-avni/metadata.yaml | 14 +- .../connectors/source-avni/setup.py | 3 +- .../source-avni/source_avni/components.py | 36 +++ .../source-avni/source_avni/manifest.yaml | 141 +++++++++++ .../source_avni/schemas/subjects.json | 9 +- .../source-avni/source_avni/source.py | 227 +----------------- .../source-avni/source_avni/spec.yaml | 26 -- .../source-avni/unit_tests/test_components.py | 44 ++++ .../unit_tests/test_incremental_streams.py | 61 ----- .../source-avni/unit_tests/test_source.py | 38 --- .../source-avni/unit_tests/test_streams.py | 97 -------- 16 files changed, 282 insertions(+), 574 deletions(-) rename airbyte-integrations/connectors/source-avni/{unit_tests => }/__init__.py (100%) create mode 100644 airbyte-integrations/connectors/source-avni/source_avni/components.py create mode 100644 airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml delete mode 100644 airbyte-integrations/connectors/source-avni/source_avni/spec.yaml create mode 100644 airbyte-integrations/connectors/source-avni/unit_tests/test_components.py delete mode 100644 airbyte-integrations/connectors/source-avni/unit_tests/test_incremental_streams.py delete mode 100644 airbyte-integrations/connectors/source-avni/unit_tests/test_source.py delete mode 100644 airbyte-integrations/connectors/source-avni/unit_tests/test_streams.py diff --git a/airbyte-integrations/connectors/source-avni/Dockerfile b/airbyte-integrations/connectors/source-avni/Dockerfile index 81bafe51cb7c..09b5073bfc31 100644 --- a/airbyte-integrations/connectors/source-avni/Dockerfile +++ b/airbyte-integrations/connectors/source-avni/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9.13-alpine3.15 as base +FROM python:3.9.11-alpine3.15 as base # build and load all requirements FROM base as builder diff --git a/airbyte-integrations/connectors/source-avni/README.md b/airbyte-integrations/connectors/source-avni/README.md index b49197bce903..075de8d47796 100644 --- a/airbyte-integrations/connectors/source-avni/README.md +++ b/airbyte-integrations/connectors/source-avni/README.md @@ -1,35 +1,10 @@ # Avni Source -This is the repository for the Avni source connector, written in Python. +This is the repository for the Avni configuration based source connector. 
For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/avni). ## Local development -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.9.0` - -#### Build & Activate Virtual Environment and install dependencies -From this connector directory, create a virtual environment: -``` -python -m venv .venv -``` - -This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -pip install '.[tests]' -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - #### Building via Gradle You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. @@ -47,14 +22,6 @@ See `integration_tests/sample_config.json` for a sample config file. **If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source avni test creds` and place them into `secrets/config.json`. -### Locally running the connector -``` -python main.py spec -python main.py check --config secrets/config.json -python main.py discover --config secrets/config.json -python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json -``` - ### Locally running the connector docker image #### Build @@ -63,12 +30,6 @@ First, make sure you build the latest Docker image: docker build . -t airbyte/source-avni:dev ``` -If you want to build the Docker image with the CDK on your local machine (rather than the most recent package published to pypi), from the airbyte base directory run: -```bash -CONNECTOR_TAG= CONNECTOR_NAME= sh airbyte-integrations/scripts/build-connector-image-with-local-cdk.sh -``` - - You can also build the connector image via Gradle: ``` ./gradlew :airbyte-integrations:connectors:source-avni:airbyteDocker @@ -85,32 +46,15 @@ docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-avni:dev discover --co docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-avni:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json ``` ## Testing -Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. -First install test dependencies into your virtual environment: -``` -pip install .[tests] -``` -### Unit Tests -To run unit tests locally, from the connector directory run: -``` -python -m pytest unit_tests -``` -### Integration Tests -There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). 
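Before running the Docker-based acceptance suite described below, the declarative source can be smoke-tested straight from Python. A minimal sketch, assuming a valid `secrets/config.json` is present:

```python
# Local smoke test — minimal sketch, assuming secrets/config.json holds
# valid credentials; the acceptance tests automate this check in Docker.
import json
import logging

from source_avni.source import SourceAvni

config = json.load(open("secrets/config.json"))
ok, error = SourceAvni().check_connection(logging.getLogger("airbyte"), config)
assert ok, error
```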
-#### Custom Integration tests -Place custom tests inside `integration_tests/` folder, then, from the connector root, run -``` -python -m pytest integration_tests -``` #### Acceptance Tests Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. -To run your integration tests with acceptance tests, from the connector root, run + +To run your integration tests with Docker, run: ``` -python -m pytest integration_tests -p integration_tests.acceptance +./acceptance-test-docker.sh ``` -To run your integration tests with docker ### Using gradle to run tests All commands should be run from airbyte project root. diff --git a/airbyte-integrations/connectors/source-avni/unit_tests/__init__.py b/airbyte-integrations/connectors/source-avni/__init__.py similarity index 100% rename from airbyte-integrations/connectors/source-avni/unit_tests/__init__.py rename to airbyte-integrations/connectors/source-avni/__init__.py diff --git a/airbyte-integrations/connectors/source-avni/acceptance-test-config.yml b/airbyte-integrations/connectors/source-avni/acceptance-test-config.yml index da3132808563..e21280f168e4 100644 --- a/airbyte-integrations/connectors/source-avni/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-avni/acceptance-test-config.yml @@ -1,31 +1,31 @@ # See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) # for more information about how to configure these tests -connector_image: airbyte/source-avni:0.1.0 +connector_image: airbyte/source-avni:dev acceptance_tests: - spec: - tests: - - spec_path: "source_avni/spec.yaml" - connection: - tests: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" - discovery: - tests: - - config_path: "secrets/config.json" + # spec: + # tests: + # - spec_path: "source_avni/spec.yaml" + # connection: + # tests: + # - config_path: "secrets/config.json" + # status: "succeed" + # - config_path: "integration_tests/invalid_config.json" + # status: "failed" + # discovery: + # tests: + # - config_path: "secrets/config.json" basic_read: tests: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" empty_streams: [] - incremental: - tests: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - future_state: - future_state_path: "integration_tests/abnormal_state.json" - full_refresh: - tests: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" + # incremental: + # tests: + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog.json" + # future_state: + # future_state_path: "integration_tests/abnormal_state.json" + # full_refresh: + # tests: + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json index 
df39258c9271..f3f7d38254c0 100644 --- a/airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-avni/integration_tests/configured_catalog.json @@ -4,17 +4,7 @@ "stream": { "name": "subjects", "json_schema": { - "properties": { - "username": { - "type": "string" - }, - "password": { - "type": "string" - }, - "lastModifiedDateTime":{ - "type": "string" - } - } + "properties": {} }, "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, @@ -30,17 +20,7 @@ "stream": { "name": "program_enrolments", "json_schema": { - "properties": { - "username": { - "type": "string" - }, - "password": { - "type": "string" - }, - "lastModifiedDateTime":{ - "type": "string" - } - } + "properties": {} }, "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, @@ -56,17 +36,7 @@ "stream": { "name": "program_encounters", "json_schema": { - "properties": { - "username": { - "type": "string" - }, - "password": { - "type": "string" - }, - "lastModifiedDateTime":{ - "type": "string" - } - } + "properties": {} }, "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, @@ -82,17 +52,7 @@ "stream": { "name": "encounters", "json_schema": { - "properties": { - "username": { - "type": "string" - }, - "password": { - "type": "string" - }, - "lastModifiedDateTime":{ - "type": "string" - } - } + "properties": {} }, "supported_sync_modes": ["full_refresh","incremental"], "source_defined_cursor": true, diff --git a/airbyte-integrations/connectors/source-avni/metadata.yaml b/airbyte-integrations/connectors/source-avni/metadata.yaml index dbd0ed9f12fa..79cdb44b311a 100644 --- a/airbyte-integrations/connectors/source-avni/metadata.yaml +++ b/airbyte-integrations/connectors/source-avni/metadata.yaml @@ -1,19 +1,25 @@ data: allowedHosts: hosts: - - "*" # Please change to the hostname of the source. 
+ - "*" registries: oss: enabled: true + cloud: + enabled: true connectorSubtype: api connectorType: source - definitionId: a4adf548-9f40-4eb7-958f-9ff322abd481 + definitionId: 5d297ac7-355e-4a04-be75-a5e7e175fc4e dockerImageTag: 0.1.0 dockerRepository: airbyte/source-avni githubIssueLabel: source-avni icon: avni.svg license: MIT - name: Avni + name: Avni + releaseDate: "2023-09-07" releaseStage: alpha - supportUrl: https://docs.airbyte.com/integrations/sources/avni + supportLevel: community + documentationUrl: https://docs.airbyte.com/integrations/sources/avni + tags: + - language:lowcode metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-avni/setup.py b/airbyte-integrations/connectors/source-avni/setup.py index 9f3fd1532f27..b5f12bb51f55 100644 --- a/airbyte-integrations/connectors/source-avni/setup.py +++ b/airbyte-integrations/connectors/source-avni/setup.py @@ -6,11 +6,12 @@ from setuptools import find_packages, setup MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.2", + "airbyte-cdk~=0.1", "boto3==1.18.0", ] TEST_REQUIREMENTS = [ + "requests-mock~=1.9.3", "pytest~=6.2", "pytest-mock~=3.6.1", "connector-acceptance-test", diff --git a/airbyte-integrations/connectors/source-avni/source_avni/components.py b/airbyte-integrations/connectors/source-avni/source_avni/components.py new file mode 100644 index 000000000000..ba6d6eb99a6b --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/components.py @@ -0,0 +1,36 @@ +from airbyte_cdk.sources.declarative.auth.token import BasicHttpAuthenticator +from dataclasses import dataclass +import boto3 +import requests + + +@dataclass +class CustomAuthenticator(BasicHttpAuthenticator): + + + @property + def token(self) -> str: + + username = self._username.eval(self.config) + password = self._password.eval(self.config) + + app_client_id = self.get_client_id() + + client = boto3.client("cognito-idp", region_name="ap-south-1") + response = client.initiate_auth( + ClientId=app_client_id, AuthFlow="USER_PASSWORD_AUTH", AuthParameters={"USERNAME": username, "PASSWORD": password} + ) + token = response["AuthenticationResult"]["IdToken"] + return token + + @property + def auth_header(self) -> str: + return "auth-token" + + def get_client_id(self): + + url_client = "https://app.avniproject.org/idp-details" + response = requests.get(url_client) + response.raise_for_status() + client = response.json() + return client["cognito"]["clientId"] diff --git a/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml new file mode 100644 index 000000000000..d62154bead30 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml @@ -0,0 +1,141 @@ +version: "0.29.0" + +definitions: + selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: ["content"] + + requester: + type: HttpRequester + url_base: "https://app.avniproject.org/api" + http_method: "GET" + authenticator: + class_name: source_avni.components.CustomAuthenticator + username: "{{config['username']}}" + password: "{{config['password']}}" + + retriever: + type: SimpleRetriever + record_selector: + $ref: "#/definitions/selector" + paginator: + type: "DefaultPaginator" + page_size_option: + type: "RequestOption" + inject_into: "request_parameter" + field_name: "size" + pagination_strategy: + type: "PageIncrement" + page_size: 100 + page_token_option: + type: "RequestOption" + inject_into: "request_parameter" + field_name: "page" + 
requester: + $ref: "#/definitions/requester" + + incremental_base: + type: DatetimeBasedCursor + cursor_field: "last_modified_at" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + start_datetime: + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + start_time_option: + field_name: "lastModifiedDateTime" + inject_into: "request_parameter" + + transformations_base: + - type: AddFields + fields: + - path: [ "last_modified_at" ] + value: "{{ record['audit']['Last modified at'] }}" + + base_stream: + type: DeclarativeStream + retriever: + $ref: "#/definitions/retriever" + + subjects_stream: + $ref: "#/definitions/base_stream" + name: "subjects" + primary_key: "ID" + incremental_sync: + $ref: "#/definitions/incremental_base" + transformations: + $ref: "#/definitions/transformations_base" + $parameters: + path: "/subjects" + + program_encounters_stream: + $ref: "#/definitions/base_stream" + name: "program_encounters" + primary_key: "ID" + incremental_sync: + $ref: "#/definitions/incremental_base" + transformations: + $ref: "#/definitions/transformations_base" + $parameters: + path: "/programEncounters" + + program_enrolments_stream: + $ref: "#/definitions/base_stream" + name: "program_enrolments" + primary_key: "ID" + incremental_sync: + $ref: "#/definitions/incremental_base" + transformations: + $ref: "#/definitions/transformations_base" + $parameters: + path: "/programEnrolments" + + encounters_stream: + $ref: "#/definitions/base_stream" + name: "encounters" + primary_key: "ID" + incremental_sync: + $ref: "#/definitions/incremental_base" + transformations: + $ref: "#/definitions/transformations_base" + $parameters: + path: "/encounters" + +streams: + - "#/definitions/subjects_stream" + - "#/definitions/program_enrolments_stream" + - "#/definitions/program_encounters_stream" + - "#/definitions/encounters_stream" + +check: + type: CheckStream + stream_names: + - "subjects" + + +spec: + type: Spec + documentation_url: https://docs.airbyte.com/integrations/sources/avni + connection_specification: + title: Avni Spec + type: object + required: + - username + - password + - start_date + additionalProperties: true + properties: + username: + type: string + description: Your avni platform Username + password: + type: string + description: Your avni platform password + airbyte_secret: true + start_date: + type: string + default: "2000-06-23T01:30:00.000Z" + description: Specify Date and time from which you want to fetch data + examples: + - "2000-10-31T01:30:00.000Z" diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json index 8761f2514761..30006a7dcf7b 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/subjects.json @@ -83,8 +83,13 @@ "last_modified_at":{ "type": "string", "format": "YYYY-MM-DDTHH:mm:ss.sssZ" - } - , + }, + "catchments":{ + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, "audit": { "type": ["null", "object"], "additionalProperties": true, diff --git a/airbyte-integrations/connectors/source-avni/source_avni/source.py b/airbyte-integrations/connectors/source-avni/source_avni/source.py index e12d819d2c8c..e6c65ceadb7d 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/source.py +++ b/airbyte-integrations/connectors/source-avni/source_avni/source.py @@ -2,224 +2,17 @@ # Copyright (c) 2023 
Airbyte, Inc., all rights reserved. # -from abc import ABC -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource -import boto3 -import requests -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import IncrementalMixin, Stream -from airbyte_cdk.sources.streams.core import StreamData -from airbyte_cdk.sources.streams.http import HttpStream +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. +WARNING: Do not modify this file. +""" -class AvniStream(HttpStream, ABC): - url_base = "https://app.avniproject.org/api/" - primary_key = "ID" - cursor_value = None - current_page = 0 - last_record = None - - def __init__(self, start_date: str, auth_token: str, **kwargs): - super().__init__(**kwargs) - - self.start_date = start_date - self.auth_token = auth_token - - def request_params( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> MutableMapping[str, Any]: - - params = {"lastModifiedDateTime": self.state["last_modified_at"]} - if next_page_token: - params.update(next_page_token) - return params - - def request_headers( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> Mapping[str, Any]: - - return {"auth-token": self.auth_token} - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - - data = response.json()["content"] - if data: - self.last_record = data[-1] - - yield from data - - def read_records( - self, - sync_mode: SyncMode, - cursor_field: List[str] = None, - stream_slice: Mapping[str, Any] = None, - stream_state: Mapping[str, Any] = None, - ) -> Iterable[StreamData]: - - records = super().read_records(sync_mode, cursor_field, stream_slice, stream_state) - for record in records: - last_modified_at = record["audit"]["Last modified at"] - record["last_modified_at"] = last_modified_at - yield record - - def update_state(self) -> None: - - if self.last_record: - updated_last_date = self.last_record["audit"]["Last modified at"] - if updated_last_date > self.state["last_modified_at"]: - self.state = {self.cursor_field: updated_last_date} - self.last_record = None - - return None - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - - total_elements = int(response.json()["totalElements"]) - page_size = int(response.json()["pageSize"]) - - if total_elements == page_size: - self.current_page = self.current_page + 1 - return {"page": self.current_page} - - self.update_state() - - self.current_page = 0 - - return None - - -class IncrementalAvniStream(AvniStream, IncrementalMixin, ABC): - - state_checkpoint_interval = None - - @property - def cursor_field(self) -> str: - return "last_modified_at" - - @property - def state(self) -> Mapping[str, Any]: - - if self.cursor_value: - return {self.cursor_field: self.cursor_value} - else: - return {self.cursor_field: self.start_date} - - @state.setter - def state(self, value: Mapping[str, Any]): - self.cursor_value = value[self.cursor_field] - self._state = value - - -class Subjects(IncrementalAvniStream): - - """ - This implement Subject Stream in Source Avni - Api docs : https://avni.readme.io/docs/api-guide - Api endpoints : 
https://app.swaggerhub.com/apis-docs/samanvay/avni-external/1.0.0 - """ - - def path(self, **kwargs) -> str: - return "subjects" - - -class ProgramEnrolments(IncrementalAvniStream): - - """ - This implement ProgramEnrolments Stream in Source Avni - Api docs : https://avni.readme.io/docs/api-guide - Api endpoints : https://app.swaggerhub.com/apis-docs/samanvay/avni-external/1.0.0 - """ - - def path(self, **kwargs) -> str: - return "programEnrolments" - - -class ProgramEncounters(IncrementalAvniStream): - - """ - This implement ProgramEncounters Stream in Source Avni - Api docs : https://avni.readme.io/docs/api-guide - Api endpoints : https://app.swaggerhub.com/apis-docs/samanvay/avni-external/1.0.0 - """ - - def path(self, **kwargs) -> str: - return "programEncounters" - - -class Encounters(IncrementalAvniStream): - - """ - This implement Encounters Stream in Source Avni - Api docs : https://avni.readme.io/docs/api-guide - Api endpoints : https://app.swaggerhub.com/apis-docs/samanvay/avni-external/1.0.0 - """ - - def path(self, **kwargs) -> str: - return "encounters" - - -class SourceAvni(AbstractSource): - def get_client_id(self): - - url_client = "https://app.avniproject.org/idp-details" - response = requests.get(url_client) - response.raise_for_status() - client = response.json() - return client["cognito"]["clientId"] - - def get_token(self, username: str, password: str, app_client_id: str) -> str: - - """ - Avni Api Authentication : https://avni.readme.io/docs/api-guide#authentication - AWS Cognito for authentication : https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/cognito-idp/client/initiate_auth.html - """ - - client = boto3.client("cognito-idp", region_name="ap-south-1") - response = client.initiate_auth( - ClientId=app_client_id, AuthFlow="USER_PASSWORD_AUTH", AuthParameters={"USERNAME": username, "PASSWORD": password} - ) - return response["AuthenticationResult"]["IdToken"] - - def check_connection(self, logger, config) -> Tuple[bool, any]: - - username = config["username"] - password = config["password"] - - try: - client_id = self.get_client_id() - except Exception as error: - return False, str(error) + ": Please connect With Avni Team" - - try: - auth_token = self.get_token(username, password, client_id) - stream_kwargs = {"auth_token": auth_token, "start_date": config["start_date"]} - next(Subjects(**stream_kwargs).read_records(SyncMode.full_refresh)) - return True, None - - except Exception as error: - return False, error - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - - username = config["username"] - password = config["password"] - - try: - client_id = self.get_client_id() - except Exception as error: - print(str(error) + ": Please connect With Avni Team") - raise error - - auth_token = self.get_token(username, password, client_id) - - stream_kwargs = {"auth_token": auth_token, "start_date": config["start_date"]} - - return [ - Subjects(**stream_kwargs), - ProgramEnrolments(**stream_kwargs), - ProgramEncounters(**stream_kwargs), - Encounters(**stream_kwargs), - ] +# Declarative Source +class SourceAvni(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-avni/source_avni/spec.yaml b/airbyte-integrations/connectors/source-avni/source_avni/spec.yaml deleted file mode 100644 index 2ecbcd34dae2..000000000000 --- a/airbyte-integrations/connectors/source-avni/source_avni/spec.yaml +++ /dev/null @@ -1,26 +0,0 @@ -documentationUrl: 
https://docs.airbyte.com/integrations/sources/avni -connectionSpecification: - $schema: http://json-schema.org/draft-07/schema# - title: Avni Spec - type: object - required: - - username - - password - - start_date - properties: - username: - type: string - description: Your Avni platform username - title: Username - password: - type: string - description: Your Avni platform password - title: Password - airbyte_secret: true - start_date: - type: string - default: "2000-06-23T01:30:00.000Z" - description: Specify Date and time from which you want to fetch data - title: Start date and time - examples: - - "2000-10-31T01:30:00.000Z" \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-avni/unit_tests/test_components.py b/airbyte-integrations/connectors/source-avni/unit_tests/test_components.py new file mode 100644 index 000000000000..1f8467847f48 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/unit_tests/test_components.py @@ -0,0 +1,44 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from source_avni.components import CustomAuthenticator +from unittest.mock import Mock, patch + + +@patch('boto3.client') +def test_token_property(mock_boto3_client): + + mock_cognito_client = Mock() + mock_boto3_client.return_value = mock_cognito_client + + config= { "username": "example@gmail.com", "api_key": "api_key" } + source = CustomAuthenticator(config=config,username="example@gmail.com",password="api_key",parameters="") + source._username = Mock() + source._username.eval.return_value = "test_username" + source._password = Mock() + source._password.eval.return_value = "test_password" + source.get_client_id = Mock() + source.get_client_id.return_value = "test_client_id" + + mock_cognito_client.initiate_auth.return_value = { + "AuthenticationResult": { + "IdToken": "test_id_token" + } + } + token = source.token + mock_boto3_client.assert_called_once_with("cognito-idp", region_name="ap-south-1") + mock_cognito_client.initiate_auth.assert_called_once_with( + ClientId="test_client_id", + AuthFlow="USER_PASSWORD_AUTH", + AuthParameters={"USERNAME": "test_username", "PASSWORD": "test_password"} + ) + assert token == "test_id_token" + +def test_get_client_id(mocker): + + config= { "username": "example@gmail.com", "api_key": "api_key" } + source = CustomAuthenticator(config=config,username="example@gmail.com",password="api_key",parameters="") + client_id = source.get_client_id() + expected_length = 26 + assert len(client_id) == expected_length \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-avni/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-avni/unit_tests/test_incremental_streams.py deleted file mode 100644 index ccd09b924011..000000000000 --- a/airbyte-integrations/connectors/source-avni/unit_tests/test_incremental_streams.py +++ /dev/null @@ -1,61 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
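The authenticator exercised by these tests wraps a three-step flow: fetch the Cognito client id from `/idp-details`, exchange the username and password for an `IdToken` via `initiate_auth`, and send that token in the `auth-token` header. A standalone sketch of the same flow, assuming the production URL and AWS region hardcoded at this point in the series (`fetch_subjects` is an illustrative name):

```python
# Standalone sketch of the Avni auth + fetch flow implemented by
# CustomAuthenticator; URL and region mirror the values in this patch.
import boto3
import requests

def fetch_subjects(username: str, password: str, last_modified: str) -> list:
    idp = requests.get("https://app.avniproject.org/idp-details", timeout=30).json()
    cognito = boto3.client("cognito-idp", region_name="ap-south-1")
    auth = cognito.initiate_auth(
        ClientId=idp["cognito"]["clientId"],
        AuthFlow="USER_PASSWORD_AUTH",
        AuthParameters={"USERNAME": username, "PASSWORD": password},
    )
    token = auth["AuthenticationResult"]["IdToken"]
    resp = requests.get(
        "https://app.avniproject.org/api/subjects",
        headers={"auth-token": token},
        params={"lastModifiedDateTime": last_modified, "size": 100, "page": 0},
        timeout=30,
    )
    resp.raise_for_status()
    return resp.json()["content"]
```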
-# - - -from airbyte_cdk.models import SyncMode -from pytest import fixture -from source_avni.source import IncrementalAvniStream - - -@fixture -def patch_incremental_base_class(mocker): - - mocker.patch.object(IncrementalAvniStream, "path", "v0/example_endpoint") - mocker.patch.object(IncrementalAvniStream, "primary_key", "test_primary_key") - mocker.patch.object(IncrementalAvniStream, "__abstractmethods__", set()) - - -def test_cursor_field(patch_incremental_base_class): - - stream = IncrementalAvniStream(start_date="",auth_token="") - expected_cursor_field = "last_modified_at" - assert stream.cursor_field == expected_cursor_field - - -def test_update_state(patch_incremental_base_class): - - stream = IncrementalAvniStream(start_date="",auth_token="") - stream.state = {"last_modified_at":"2000-06-27T04:18:36.914Z"} - stream.last_record = {"audit":{"Last modified at":"2001-06-27T04:18:36.914Z"}} - expected_state = {"last_modified_at":"2001-06-27T04:18:36.914Z"} - stream.update_state() - assert stream.state == expected_state - - -def test_stream_slices(patch_incremental_base_class): - - stream = IncrementalAvniStream(start_date="",auth_token="") - inputs = {"sync_mode": SyncMode.incremental, "cursor_field": [], "stream_state": {}} - expected_stream_slice = [None] - assert stream.stream_slices(**inputs) == expected_stream_slice - - -def test_supports_incremental(patch_incremental_base_class, mocker): - - mocker.patch.object(IncrementalAvniStream, "cursor_field", "dummy_field") - stream = IncrementalAvniStream(start_date="",auth_token="") - assert stream.supports_incremental - - -def test_source_defined_cursor(patch_incremental_base_class): - - stream = IncrementalAvniStream(start_date="",auth_token="") - assert stream.source_defined_cursor - - -def test_stream_checkpoint_interval(patch_incremental_base_class): - - stream = IncrementalAvniStream(start_date="",auth_token="") - expected_checkpoint_interval = None - assert stream.state_checkpoint_interval == expected_checkpoint_interval diff --git a/airbyte-integrations/connectors/source-avni/unit_tests/test_source.py b/airbyte-integrations/connectors/source-avni/unit_tests/test_source.py deleted file mode 100644 index 7022f4de4ce4..000000000000 --- a/airbyte-integrations/connectors/source-avni/unit_tests/test_source.py +++ /dev/null @@ -1,38 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
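The `update_state` behaviour these deleted tests covered is exactly what the declarative `DatetimeBasedCursor` now provides: stream state is the maximum `last_modified_at` seen so far. Conceptually it reduces to the sketch below, assuming ISO-8601 timestamps (which compare correctly as plain strings); `advance_state` is an illustrative name:

```python
# Conceptual equivalent of the removed update_state logic: keep the
# largest cursor value observed across the records of a sync.
def advance_state(state: dict, records: list, cursor_field: str = "last_modified_at") -> dict:
    latest = state.get(cursor_field, "")
    for record in records:
        value = record["audit"]["Last modified at"]
        if value > latest:
            latest = value
    return {cursor_field: latest}
```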
-# - -from unittest.mock import patch - -from source_avni.source import SourceAvni - - -def test_check_connection_success(mocker): - with patch('source_avni.source.SourceAvni.get_client_id') as get_client_id_mock, \ - patch('source_avni.source.SourceAvni.get_token') as get_token_mock, \ - patch('source_avni.source.Subjects.read_records') as read_records_mock: - get_client_id_mock.return_value = "ClientID" - get_token_mock.return_value = "Token" - read_records_mock.return_value = iter(["record1", "record2"]) - source = SourceAvni() - config_mock = {"username": "test_user", "password": "test_password", "start_date": "2000-06-27T04:18:36.914Z"} - result,msg = source.check_connection(None, config_mock) - assert result is True - - -def test_streams(mocker): - - mocker.patch('source_avni.source.SourceAvni.get_token').return_value = 'fake_token' - source = SourceAvni() - config_mock = {"username": "test_user", "password": "test_password", "start_date": "2000-06-27T04:18:36.914Z"} - streams = source.streams(config_mock) - excepted_outcome = 4 - assert len(streams) == excepted_outcome - - -def test_get_client_id(mocker): - - source = SourceAvni() - client_id = source.get_client_id() - expected_length = 26 - assert len(client_id) == expected_length diff --git a/airbyte-integrations/connectors/source-avni/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-avni/unit_tests/test_streams.py deleted file mode 100644 index c0be8d8359d4..000000000000 --- a/airbyte-integrations/connectors/source-avni/unit_tests/test_streams.py +++ /dev/null @@ -1,97 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - -from http import HTTPStatus -from unittest.mock import MagicMock - -import pytest -from source_avni.source import AvniStream - - -@pytest.fixture -def patch_base_class(mocker): - - mocker.patch.object(AvniStream, "path", "v0/example_endpoint") - mocker.patch.object(AvniStream, "primary_key", "test_primary_key") - mocker.patch.object(AvniStream, "__abstractmethods__", set()) - - -def test_request_params(mocker,patch_base_class): - - stream = AvniStream(start_date="",auth_token="") - inputs = {"stream_slice": None, "stream_state": None, "next_page_token": {"page":5}} - stream.state = {"last_modified_at":"AnyDate"} - expected_params = {"lastModifiedDateTime":"AnyDate","page":5} - assert stream.request_params(**inputs) == expected_params - - -def test_next_page_token(patch_base_class): - - stream = AvniStream(start_date="",auth_token="") - response_mock = MagicMock() - response_mock.json.return_value = { - "totalElements": 20, - "totalPages": 10, - "pageSize": 20 - } - - stream.current_page = 1 - inputs = {"response": response_mock} - expected_token = {"page": 2} - - assert stream.next_page_token(**inputs) == expected_token - assert stream.current_page == 2 - - -def test_parse_response(patch_base_class,mocker): - - stream = AvniStream(start_date="",auth_token="") - response = MagicMock - response.content = b'{"content": [{"id": 1, "name": "John"}, {"id": 2, "name": "Jane"}]}' - - inputs = {"response": mocker.Mock(json=mocker.Mock(return_value={"content": [{"id": 1, "name": "Avni"}, {"id": 2, "name": "Airbyte"}]}))} - gen = stream.parse_response(**inputs) - assert next(gen) == {"id": 1, "name": "Avni"} - assert next(gen) == {"id": 2, "name": "Airbyte"} - - -def test_request_headers(patch_base_class): - - stream = AvniStream(start_date="",auth_token="") - inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} - stream.auth_token = "Token" - 
expected_headers = {"auth-token":"Token"} - assert stream.request_headers(**inputs) == expected_headers - - -def test_http_method(patch_base_class): - - stream = AvniStream(start_date="",auth_token="") - expected_method = "GET" - assert stream.http_method == expected_method - - -@pytest.mark.parametrize( - ("http_status", "should_retry"), - [ - (HTTPStatus.OK, False), - (HTTPStatus.BAD_REQUEST, False), - (HTTPStatus.TOO_MANY_REQUESTS, True), - (HTTPStatus.INTERNAL_SERVER_ERROR, True), - ], -) -def test_should_retry(patch_base_class, http_status, should_retry): - - response_mock = MagicMock() - response_mock.status_code = http_status - stream = AvniStream(start_date="",auth_token="") - assert stream.should_retry(response_mock) == should_retry - - -def test_backoff_time(patch_base_class): - - response_mock = MagicMock() - stream = AvniStream(start_date="",auth_token="") - expected_backoff_time = None - assert stream.backoff_time(response_mock) == expected_backoff_time From c8a6a6250c7feecc78b0b3f4785750437c112770 Mon Sep 17 00:00:00 2001 From: Aviraj Gour Date: Thu, 7 Sep 2023 14:24:35 +0530 Subject: [PATCH 14/38] acceptance tests --- .../source-avni/acceptance-test-config.yml | 44 +++++++++---------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/airbyte-integrations/connectors/source-avni/acceptance-test-config.yml b/airbyte-integrations/connectors/source-avni/acceptance-test-config.yml index e21280f168e4..54ec3ab7e71f 100644 --- a/airbyte-integrations/connectors/source-avni/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-avni/acceptance-test-config.yml @@ -2,30 +2,30 @@ # for more information about how to configure these tests connector_image: airbyte/source-avni:dev acceptance_tests: - # spec: - # tests: - # - spec_path: "source_avni/spec.yaml" - # connection: - # tests: - # - config_path: "secrets/config.json" - # status: "succeed" - # - config_path: "integration_tests/invalid_config.json" - # status: "failed" - # discovery: - # tests: - # - config_path: "secrets/config.json" + spec: + tests: + - spec_path: "source_avni/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" basic_read: tests: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" empty_streams: [] - # incremental: - # tests: - # - config_path: "secrets/config.json" - # configured_catalog_path: "integration_tests/configured_catalog.json" - # future_state: - # future_state_path: "integration_tests/abnormal_state.json" - # full_refresh: - # tests: - # - config_path: "secrets/config.json" - # configured_catalog_path: "integration_tests/configured_catalog.json" + incremental: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state: + future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" From 4c07377d55d359090fa7dcd60bbdb161fa66f197 Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Fri, 15 Sep 2023 13:56:20 +0530 Subject: [PATCH 15/38] added the title fields --- .../connectors/source-avni/source_avni/manifest.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml 
b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml index d62154bead30..1fef4a09dee3 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml +++ b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml @@ -129,13 +129,16 @@ spec: username: type: string description: Your avni platform Username + title: Username password: type: string description: Your avni platform password + title: Password airbyte_secret: true start_date: type: string default: "2000-06-23T01:30:00.000Z" description: Specify Date and time from which you want to fetch data + title: Start Date examples: - "2000-10-31T01:30:00.000Z" From beedbdf5be358cfdd2ae8d8872fa6f461cd743ae Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Sat, 30 Sep 2023 19:15:21 +0530 Subject: [PATCH 16/38] formatting --- .../source-glific/source_glific/source.py | 126 ++++++++---------- 1 file changed, 59 insertions(+), 67 deletions(-) diff --git a/airbyte-integrations/connectors/source-glific/source_glific/source.py b/airbyte-integrations/connectors/source-glific/source_glific/source.py index e58fa47ca6b6..6dd06085c3c9 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/source.py +++ b/airbyte-integrations/connectors/source-glific/source_glific/source.py @@ -17,7 +17,6 @@ from datetime import datetime - stream_json_schema = { "$schema": "http://json-schema.org/draft-07/schema#", "type": "object", @@ -31,9 +30,9 @@ }, } + # Basic full refresh stream class GlificStream(HttpStream, ABC): - primary_key = "id" cursor_value = None latest_updated_date = None @@ -69,47 +68,46 @@ def __init__(self, stream_name: str, url_base: str, pagination_limit: int, crede self.api_url = url_base self.credentials = credentials self.pagination_limit = pagination_limit - self.start_time = config['start_time'] + self.start_time = config["start_time"] self.offset = 0 self.last_record = None @property def url_base(self) -> str: return self.api_url - + @property def name(self) -> str: return self.stream_name - + @property def http_method(self) -> str: """All requests in the glific stream are posts with body""" return "POST" - + def get_json_schema(self) -> dict: """Return json schema of each stream""" return stream_json_schema - - def path(self, *, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> str: + + def path( + self, *, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: return "" - + def update_state(self) -> None: - if self.latest_updated_date: - if self.latest_updated_date> self.state['updated_at']: + if self.latest_updated_date > self.state["updated_at"]: self.state = {self.cursor_field: self.latest_updated_date} self.latest_updated_date = None return None def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - - json_resp = response.json() - if json_resp['data']['organizationExportData'] is not None: - records_str = json_resp['data']['organizationExportData']['data'] + if json_resp["data"]["organizationExportData"] is not None: + records_str = json_resp["data"]["organizationExportData"]["data"] records_obj = json.loads(records_str) - if self.stream_name in records_obj['data']: - records = json.loads(records_str)['data'][f'{self.stream_name}'] + if self.stream_name in records_obj["data"]: + records = json.loads(records_str)["data"][f"{self.stream_name}"] # more records need to be fetched if 
len(records) == (self.pagination_limit + 1): self.offset += 1 @@ -119,56 +117,62 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, return None - def request_headers(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> Mapping[str, Any]: - - return {'authorization': self.credentials['access_token'], 'Content-Type': 'application/json'} + def request_headers( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + return {"authorization": self.credentials["access_token"], "Content-Type": "application/json"} + def request_body_json( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping: + query = ( + "query organizationExportData($filter: ExportFilter) { organizationExportData(filter: $filter) {data errors { key message } } }" + ) - def request_body_json(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None) -> Mapping: - - query = "query organizationExportData($filter: ExportFilter) { organizationExportData(filter: $filter) {data errors { key message } } }" - filter_obj = { - "startTime": self.state['updated_at'], + "startTime": self.state["updated_at"], "offset": self.offset, "limit": self.pagination_limit, - "tables": [self.stream_name] + "tables": [self.stream_name], } if next_page_token is not None: filter_obj["offset"] = next_page_token["offset"] filter_obj["limit"] = next_page_token["limit"] - return {"query": query, "variables": {"filter": filter_obj}} + return {"query": query, "variables": {"filter": filter_obj}} def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - json_resp = response.json() - if json_resp['data']['organizationExportData'] is not None: - records_str = json_resp['data']['organizationExportData']['data'] + if json_resp["data"]["organizationExportData"] is not None: + records_str = json_resp["data"]["organizationExportData"]["data"] records_obj = json.loads(records_str) - yield from records_obj['data'][self.stream_name] - - - def read_records(self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_slice: Mapping[str, Any] = None, stream_state: Mapping[str, Any] = None,) -> Iterable[StreamData]: + yield from records_obj["data"][self.stream_name] + + def read_records( + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ) -> Iterable[StreamData]: records = super().read_records(sync_mode, cursor_field, stream_slice, stream_state) - + for record in records: - if(len(record['updated_at'])==19): - record['updated_at'] = record['updated_at'] + 'Z' + if len(record["updated_at"]) == 19: + record["updated_at"] = record["updated_at"] + "Z" else: - record['updated_at'] = datetime.strptime(record['updated_at'], "%Y-%m-%dT%H:%M:%S.%f").strftime("%Y-%m-%dT%H:%M:%SZ") - + record["updated_at"] = datetime.strptime(record["updated_at"], "%Y-%m-%dT%H:%M:%S.%f").strftime("%Y-%m-%dT%H:%M:%SZ") + if self.latest_updated_date: - if record['updated_at']>self.latest_updated_date: - self.latest_updated_date = record['updated_at'] + if record["updated_at"] > self.latest_updated_date: + self.latest_updated_date = record["updated_at"] else: - self.latest_updated_date = record['updated_at'] + self.latest_updated_date 
= record["updated_at"] yield record class IncrementalGlificStream(GlificStream, IncrementalMixin, ABC): - state_checkpoint_interval = None @property @@ -177,7 +181,6 @@ def cursor_field(self) -> str: @property def state(self) -> Mapping[str, Any]: - if self.cursor_value: return {self.cursor_field: self.cursor_value} else: @@ -196,7 +199,6 @@ class SourceGlific(AbstractSource): API_URL = "https://api.staging.tides.coloredcow.com/api" PAGINATION_LIMIT = 500 - def check_connection(self, logger, config) -> Tuple[bool, any]: """ Implement a connection check to validate that the user-provided config can be used to connect to the underlying API @@ -208,21 +210,16 @@ def check_connection(self, logger, config) -> Tuple[bool, any]: :param logger: logger object :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise. """ - if 'phone' not in config: - logger.info('Phone number missing') + if "phone" not in config: + logger.info("Phone number missing") return False, "Phone number missing" - if 'password' not in config: + if "password" not in config: logger.info("Password missing") return False, "Password missing" endpoint = f"{self.API_URL}/v1/session" - auth_payload = { - "user": { - "phone": config["phone"], - "password": config["password"] - } - } + auth_payload = {"user": {"phone": config["phone"], "password": config["password"]}} response = requests.post(endpoint, json=auth_payload, timeout=30) try: @@ -240,26 +237,21 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: # authenticate and get the credentials for all streams endpoint = f"{self.API_URL}/v1/session" - auth_payload = { - "user": { - "phone": config["phone"], - "password": config["password"] - } - } + auth_payload = {"user": {"phone": config["phone"], "password": config["password"]}} try: response = requests.post(endpoint, json=auth_payload, timeout=30) response.raise_for_status() - credentials = response.json()['data'] + credentials = response.json()["data"] except requests.exceptions.HTTPError: # return empty zero streams since authentication failed return [] - + # fetch the export config for organization/client/user endpoint = f"{self.API_URL}" - headers = {'authorization': credentials['access_token']} + headers = {"authorization": credentials["access_token"]} try: - query = 'query organizationExportConfig { organizationExportConfig { data errors { key message } } }' + query = "query organizationExportConfig { organizationExportConfig { data errors { key message } } }" variables = {} payload = {"query": query, "variables": variables} @@ -269,11 +261,11 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: except requests.exceptions.HTTPError: # return empty zero streams since config could not be fetched return [] - + # construct streams - export_config = json.loads(data['data']['organizationExportConfig']['data']) + export_config = json.loads(data["data"]["organizationExportConfig"]["data"]) streams = [] - for table in export_config['tables']: + for table in export_config["tables"]: stream_obj = IncrementalGlificStream(table, self.API_URL, self.PAGINATION_LIMIT, credentials, config) streams.append(stream_obj) From f013d60ea26a568497c8a687512545c9dd5cee79 Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Sat, 30 Sep 2023 19:17:34 +0530 Subject: [PATCH 17/38] {data} --- .../connectors/source-glific/source_glific/source.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git 
a/airbyte-integrations/connectors/source-glific/source_glific/source.py b/airbyte-integrations/connectors/source-glific/source_glific/source.py index 6dd06085c3c9..9451781587af 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/source.py +++ b/airbyte-integrations/connectors/source-glific/source_glific/source.py @@ -26,7 +26,11 @@ "type": [ "number", ] - } + }, + "updated_at": {"type": ["string", "null"]}, + "data": { + "type": "object", + }, }, } @@ -169,7 +173,11 @@ def read_records( self.latest_updated_date = record["updated_at"] else: self.latest_updated_date = record["updated_at"] - yield record + retval = {} + retval["id"] = record["id"] + retval["updated_at"] = record["updated_at"] + retval["data"] = record + yield retval class IncrementalGlificStream(GlificStream, IncrementalMixin, ABC): From a9df5442ee299595fd82be7f927c90c96aa66b87 Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Sat, 30 Sep 2023 19:19:09 +0530 Subject: [PATCH 18/38] new tag --- airbyte-integrations/connectors/source-glific/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-integrations/connectors/source-glific/Dockerfile b/airbyte-integrations/connectors/source-glific/Dockerfile index d68fafd35a2a..e437ac8c60dc 100644 --- a/airbyte-integrations/connectors/source-glific/Dockerfile +++ b/airbyte-integrations/connectors/source-glific/Dockerfile @@ -35,4 +35,4 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] LABEL io.airbyte.version=0.1.0 -LABEL io.airbyte.name=airbyte/source-glific +LABEL io.airbyte.name=tech4dev/source-glific From 3f01a9e1e5ccf69a5d6d2b2b8fea80b89fceb442 Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Sat, 13 Apr 2024 15:52:44 +0530 Subject: [PATCH 19/38] require user to provide avni base url --- airbyte-integrations/connectors/source-avni/Dockerfile | 2 +- airbyte-integrations/connectors/source-avni/metadata.yaml | 2 +- .../connectors/source-avni/source_avni/components.py | 7 +++---- .../connectors/source-avni/source_avni/manifest.yaml | 8 +++++++- 4 files changed, 12 insertions(+), 7 deletions(-) diff --git a/airbyte-integrations/connectors/source-avni/Dockerfile b/airbyte-integrations/connectors/source-avni/Dockerfile index 09b5073bfc31..cb3759ec2e5e 100644 --- a/airbyte-integrations/connectors/source-avni/Dockerfile +++ b/airbyte-integrations/connectors/source-avni/Dockerfile @@ -34,5 +34,5 @@ COPY source_avni ./source_avni ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.version=0.1.1 LABEL io.airbyte.name=airbyte/source-avni diff --git a/airbyte-integrations/connectors/source-avni/metadata.yaml b/airbyte-integrations/connectors/source-avni/metadata.yaml index 79cdb44b311a..6c1d9b73f1bd 100644 --- a/airbyte-integrations/connectors/source-avni/metadata.yaml +++ b/airbyte-integrations/connectors/source-avni/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: 5d297ac7-355e-4a04-be75-a5e7e175fc4e - dockerImageTag: 0.1.0 + dockerImageTag: 0.1.1 dockerRepository: airbyte/source-avni githubIssueLabel: source-avni icon: avni.svg diff --git a/airbyte-integrations/connectors/source-avni/source_avni/components.py b/airbyte-integrations/connectors/source-avni/source_avni/components.py index ba6d6eb99a6b..1f7f7f3cf873 100644 --- 
a/airbyte-integrations/connectors/source-avni/source_avni/components.py +++ b/airbyte-integrations/connectors/source-avni/source_avni/components.py @@ -7,7 +7,6 @@ @dataclass class CustomAuthenticator(BasicHttpAuthenticator): - @property def token(self) -> str: @@ -28,9 +27,9 @@ def auth_header(self) -> str: return "auth-token" def get_client_id(self): - - url_client = "https://app.avniproject.org/idp-details" - response = requests.get(url_client) + + url_client = self.config["url_base"] + "/idp-details" + response = requests.get(url_client, timeout=30) response.raise_for_status() client = response.json() return client["cognito"]["clientId"] diff --git a/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml index 1fef4a09dee3..b769efd25149 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml +++ b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml @@ -9,12 +9,13 @@ definitions: requester: type: HttpRequester - url_base: "https://app.avniproject.org/api" + url_base: "{{config['url_base']}}/api" http_method: "GET" authenticator: class_name: source_avni.components.CustomAuthenticator username: "{{config['username']}}" password: "{{config['password']}}" + idp_base: "{{config['url_base']}}" retriever: type: SimpleRetriever @@ -123,6 +124,7 @@ spec: required: - username - password + - url_base - start_date additionalProperties: true properties: @@ -135,6 +137,10 @@ spec: description: Your avni platform password title: Password airbyte_secret: true + url_base: + type: string + description: Your avni platform base url, with no trailing slash (/) + title: Base URL (no trailing /) start_date: type: string default: "2000-06-23T01:30:00.000Z" From 4e0e57cec0936725033277637a451a89b08cbccd Mon Sep 17 00:00:00 2001 From: Abhishek-N Date: Fri, 19 Apr 2024 14:47:21 +0530 Subject: [PATCH 20/38] remove helpers and send json_schema --- .../source_surveycto/helpers.py | 66 ------------------- .../source_surveycto/source.py | 46 ++++++++----- 2 files changed, 30 insertions(+), 82 deletions(-) delete mode 100644 airbyte-integrations/connectors/source-surveycto/source_surveycto/helpers.py diff --git a/airbyte-integrations/connectors/source-surveycto/source_surveycto/helpers.py b/airbyte-integrations/connectors/source-surveycto/source_surveycto/helpers.py deleted file mode 100644 index c027f5f0ae5e..000000000000 --- a/airbyte-integrations/connectors/source-surveycto/source_surveycto/helpers.py +++ /dev/null @@ -1,66 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
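This patch replaces per-form schema inference (the `helpers.py` deleted below fetched records just to deduce a BigQuery-style schema) with a fixed envelope schema: each SurveyCTO submission is kept whole under `data`, and only stable fields are promoted to top-level columns. A sketch of the resulting record shape (`to_envelope` is an illustrative name):

```python
# Record "envelope" this patch standardizes on: the full submission is
# preserved under "data"; KEY, SubmissionDate and endtime become columns.
def to_envelope(record: dict) -> dict:
    return {
        "KEY": record.get("KEY"),
        "SubmissionDate": record.get("SubmissionDate"),
        "endtime": record.get("endtime"),
        "data": record,
    }
```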
-# - -import base64 - -import requests -from bigquery_schema_generator.generate_schema import SchemaGenerator -from gbqschema_converter.gbqschema_to_jsonschema import json_representation as converter -from requests.adapters import HTTPAdapter -from requests.packages.urllib3.util.retry import Retry - - -class Helpers(object): - @staticmethod - def _base64_encode(string: str) -> str: - return base64.b64encode(string.encode("ascii")).decode("ascii") - - @staticmethod - def call_survey_cto(config, form_id): - server_name = config["server_name"] - start_date = config["start_date"] - user_name_password = f"{config['username']}:{config['password']}" - auth_token = Helpers._base64_encode(user_name_password) - - url = f"https://{server_name}.surveycto.com/" + f"api/v2/forms/data/wide/json/{form_id}?date={start_date}" - - retry_strategy = Retry(total=3, status_forcelist=[429, 409], method_whitelist=["HEAD", "GET", "OPTIONS"]) - adapter = HTTPAdapter(max_retries=retry_strategy) - http = requests.Session() - http.mount("https://", adapter) - http.mount("http://", adapter) - - response = http.get(url, headers={"Authorization": "Basic " + auth_token}) - response_json = response.json() - - if response.status_code != 200 and response_json["error"]: - message = response_json["error"]["message"] - raise Exception(message) - - for data in response_json: - try: - yield data - except Exception as e: - raise e - - return data - - @staticmethod - def get_filter_data(data): - generator = SchemaGenerator(input_format="dict", infer_mode="NULLABLE", preserve_input_sort_order="true") - - schema_map, error_logs = generator.deduce_schema(input_data=data) - schema = generator.flatten_schema(schema_map) - schema_json = converter(schema) - schema = schema_json["definitions"]["element"]["properties"] - return schema - - @staticmethod - def get_json_schema(schema): - json_schema = { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": schema, - } - return json_schema diff --git a/airbyte-integrations/connectors/source-surveycto/source_surveycto/source.py b/airbyte-integrations/connectors/source-surveycto/source_surveycto/source.py index 816b0dad755e..8bb959d20b96 100644 --- a/airbyte-integrations/connectors/source-surveycto/source_surveycto/source.py +++ b/airbyte-integrations/connectors/source-surveycto/source_surveycto/source.py @@ -14,8 +14,25 @@ from airbyte_cdk.sources.streams.http import HttpStream from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer -from .helpers import Helpers +stream_json_schema = { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": True, + "properties": { + "KEY": { + "type": [ + "string", + "null", + ] + }, + "endtime": {"type": ["string", "null"]}, + "data": { + "type": "object", + }, + "SubmissionDate": {"type": ["string", "null"]}, + }, +} class SurveyStream(HttpStream, ABC): transformer: TypeTransformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) @@ -94,13 +111,16 @@ def parse_response( ) -> Iterable[Mapping]: self.response_json = response.json() - for data in self.response_json: - try: - yield data - except Exception as e: - msg = "Encountered an exception parsing schema" - self.logger.exception(msg) - raise e + for record in self.response_json: + record_id = record.get("KEY") + submission_date = record.get("SubmissionDate") + endtime = record.get("endtime") + + retval = {"KEY": record_id, "data": record} + retval["SubmissionDate"] = submission_date + 
retval["endtime"] = endtime + + yield retval def read_records(self, *args, **kwargs) -> Iterable[Mapping[str, Any]]: for record in super().read_records(*args, **kwargs): @@ -116,10 +136,7 @@ def check_connection(self, logger, config) -> Tuple[bool, Any]: try: for form_id in form_ids: - schema = Helpers.call_survey_cto(config, form_id) - filter_data = Helpers.get_filter_data(schema) - schema_res = Helpers.get_json_schema(filter_data) - stream = SurveyctoStream(config=config, form_id=form_id, schema=schema_res) + stream = SurveyctoStream(config=config, form_id=form_id, schema=stream_json_schema) next(stream.read_records(sync_mode=SyncMode.full_refresh)) return True, None @@ -132,10 +149,7 @@ def generate_streams(self, config: str) -> List[Stream]: streams = [] for form_id in forms: - schema = Helpers.call_survey_cto(config, form_id) - filter_data = Helpers.get_filter_data(schema) - schema_res = Helpers.get_json_schema(filter_data) - stream = SurveyctoStream(config=config, form_id=form_id, schema=schema_res) + stream = SurveyctoStream(config=config, form_id=form_id, schema=stream_json_schema) streams.append(stream) return streams From e15feca304c6f9823c581db79e5e4d6c436e985a Mon Sep 17 00:00:00 2001 From: Abhishek-N Date: Fri, 19 Apr 2024 14:50:39 +0530 Subject: [PATCH 21/38] send data, key, submission time --- .../connectors/source-surveycto/source_surveycto/source.py | 1 + 1 file changed, 1 insertion(+) diff --git a/airbyte-integrations/connectors/source-surveycto/source_surveycto/source.py b/airbyte-integrations/connectors/source-surveycto/source_surveycto/source.py index 8bb959d20b96..67d27eec9900 100644 --- a/airbyte-integrations/connectors/source-surveycto/source_surveycto/source.py +++ b/airbyte-integrations/connectors/source-surveycto/source_surveycto/source.py @@ -112,6 +112,7 @@ def parse_response( self.response_json = response.json() for record in self.response_json: + # send data, key, submission date and endtime record_id = record.get("KEY") submission_date = record.get("SubmissionDate") endtime = record.get("endtime") From 77740183a73e68b8cd7f970c06838c1ae35082cc Mon Sep 17 00:00:00 2001 From: Siddhant Singh Date: Thu, 9 May 2024 13:00:37 +0530 Subject: [PATCH 22/38] added the api for approval status --- .../connectors/source-avni/Dockerfile | 19 ++++---- .../source-avni/source_avni/manifest.yaml | 15 +++++- .../source_avni/schemas/approvalStatuses.json | 46 +++++++++++++++++++ 3 files changed, 68 insertions(+), 12 deletions(-) create mode 100644 airbyte-integrations/connectors/source-avni/source_avni/schemas/approvalStatuses.json diff --git a/airbyte-integrations/connectors/source-avni/Dockerfile b/airbyte-integrations/connectors/source-avni/Dockerfile index cb3759ec2e5e..7cabf21c80e5 100644 --- a/airbyte-integrations/connectors/source-avni/Dockerfile +++ b/airbyte-integrations/connectors/source-avni/Dockerfile @@ -1,33 +1,32 @@ FROM python:3.9.11-alpine3.15 as base -# build and load all requirements +# Build and load all requirements FROM base as builder WORKDIR /airbyte/integration_code -# upgrade pip to the latest version +# Upgrade pip to the latest version and install build dependencies RUN apk --no-cache upgrade \ && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - + && apk --no-cache add tzdata build-base libffi-dev openssl-dev COPY setup.py ./ -# install necessary packages to a temporary folder +# Install necessary packages to a temporary folder RUN pip install --prefix=/install . 
-# build a clean environment +# Build a clean environment FROM base WORKDIR /airbyte/integration_code -# copy all loaded and built libraries to a pure basic image +# Copy all loaded and built libraries to a pure basic image COPY --from=builder /install /usr/local -# add default timezone settings +# Add default timezone settings COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime RUN echo "Etc/UTC" > /etc/timezone -# bash is installed for more convenient debugging. +# Bash is installed for more convenient debugging. RUN apk --no-cache add bash -# copy payload code only +# Copy payload code only COPY main.py ./ COPY source_avni ./source_avni diff --git a/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml index b769efd25149..2928117a1e07 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml +++ b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml @@ -29,7 +29,7 @@ definitions: field_name: "size" pagination_strategy: type: "PageIncrement" - page_size: 100 + page_size: 20 page_token_option: type: "RequestOption" inject_into: "request_parameter" @@ -103,18 +103,29 @@ definitions: $parameters: path: "/encounters" + approval_status_stream: + $ref: "#/definitions/base_stream" + name: "approvalStatuses" + primary_key: "Entity ID" + incremental_sync: + $ref: "#/definitions/incremental_base" + transformations: + $ref: "#/definitions/transformations_base" + $parameters: + path: "/approvalStatuses" + streams: - "#/definitions/subjects_stream" - "#/definitions/program_enrolments_stream" - "#/definitions/program_encounters_stream" - "#/definitions/encounters_stream" + - "#/definitions/approval_status_stream" check: type: CheckStream stream_names: - "subjects" - spec: type: Spec documentation_url: https://docs.airbyte.com/integrations/sources/avni diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/approvalStatuses.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/approvalStatuses.json new file mode 100644 index 000000000000..e8e857f8ba56 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/approvalStatuses.json @@ -0,0 +1,46 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "Entity ID": { + "type": "string" + }, + "Entity type": { + "type": "string" + }, + "Entity type ID": { + "type": "string" + }, + "Approval status": { + "type": "string" + }, + "Approval status comment": { + "type": ["null", "string"] + }, + "Status date time": { + "type": "string", + "format": "date-time" + }, + "audit": { + "type": "object", + "properties": { + "Created at": { + "type": "string", + "format": "date-time" + }, + "Last modified at": { + "type": ["null", "string"], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Created by": { + "type": "string" + }, + "Last modified by": { + "type": "string" + } + } + } + } + } + \ No newline at end of file From afe35a567bf51b30254ffb66bb064494af866aaa Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Fri, 10 May 2024 10:51:39 +0530 Subject: [PATCH 23/38] added last_modified_at --- .../source-avni/source_avni/schemas/approvalStatuses.json | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/approvalStatuses.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/approvalStatuses.json 
index e8e857f8ba56..f5241fe241d4 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/schemas/approvalStatuses.json +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/approvalStatuses.json @@ -22,6 +22,10 @@ "type": "string", "format": "date-time" }, + "last_modified_at": { + "type": "string", + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, "audit": { "type": "object", "properties": { @@ -43,4 +47,4 @@ } } } - \ No newline at end of file + From b599facb0c2c03b6b1a87304c9d5964652f16344 Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Fri, 10 May 2024 10:51:43 +0530 Subject: [PATCH 24/38] added pytz --- airbyte-integrations/connectors/source-avni/setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/airbyte-integrations/connectors/source-avni/setup.py b/airbyte-integrations/connectors/source-avni/setup.py index b5f12bb51f55..4aead2c068a7 100644 --- a/airbyte-integrations/connectors/source-avni/setup.py +++ b/airbyte-integrations/connectors/source-avni/setup.py @@ -8,6 +8,7 @@ MAIN_REQUIREMENTS = [ "airbyte-cdk~=0.1", "boto3==1.18.0", + "pytz==2024.1", ] TEST_REQUIREMENTS = [ From 6e278fd3e50b8fc1cd1516a70430a81d69c778d4 Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Fri, 10 May 2024 17:24:39 +0530 Subject: [PATCH 25/38] page size = 1000 specify order in config properties --- .../connectors/source-avni/source_avni/manifest.yaml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml index 2928117a1e07..a0cd35becc16 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml +++ b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml @@ -29,7 +29,7 @@ definitions: field_name: "size" pagination_strategy: type: "PageIncrement" - page_size: 20 + page_size: 1000 page_token_option: type: "RequestOption" inject_into: "request_parameter" @@ -143,19 +143,23 @@ spec: type: string description: Your avni platform Username title: Username + order: 1 password: type: string description: Your avni platform password title: Password airbyte_secret: true + order: 2 url_base: type: string description: Your avni platform base url, with no trailing slash (/) title: Base URL (no trailing /) + order: 3 start_date: type: string default: "2000-06-23T01:30:00.000Z" description: Specify Date and time from which you want to fetch data title: Start Date + order: 4 examples: - "2000-10-31T01:30:00.000Z" From df8f7b327961e02c63bda3dc1234a270c5a5d0af Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Tue, 14 May 2024 12:38:59 +0530 Subject: [PATCH 26/38] configurable api endpoint --- .../source-glific/source_glific/source.py | 29 ++++++++++--------- .../source-glific/source_glific/spec.yaml | 8 +++++ 2 files changed, 24 insertions(+), 13 deletions(-) diff --git a/airbyte-integrations/connectors/source-glific/source_glific/source.py b/airbyte-integrations/connectors/source-glific/source_glific/source.py index 9451781587af..4e8a1f9111bb 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/source.py +++ b/airbyte-integrations/connectors/source-glific/source_glific/source.py @@ -4,17 +4,16 @@ from abc import ABC -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Union -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams.http.auth.core import HttpAuthenticator - -import requests +from typing import Any, Iterable, 
List, Mapping, Optional, Tuple import json +from datetime import datetime +import requests + +from airbyte_cdk.models import SyncMode from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import Stream, IncrementalMixin from airbyte_cdk.sources.streams.http import HttpStream from airbyte_cdk.sources.streams.core import StreamData -from datetime import datetime stream_json_schema = { @@ -75,6 +74,7 @@ def __init__(self, stream_name: str, url_base: str, pagination_limit: int, crede self.start_time = config["start_time"] self.offset = 0 self.last_record = None + self.state = {} @property def url_base(self) -> str: @@ -100,8 +100,8 @@ def path( def update_state(self) -> None: if self.latest_updated_date: - if self.latest_updated_date > self.state["updated_at"]: - self.state = {self.cursor_field: self.latest_updated_date} + if not self.state or self.latest_updated_date > self.state["updated_at"]: + self.state = {"updated_at": self.latest_updated_date} self.latest_updated_date = None return None @@ -204,7 +204,6 @@ def state(self, value: Mapping[str, Any]): class SourceGlific(AbstractSource): """Glific source""" - API_URL = "https://api.staging.tides.coloredcow.com/api" PAGINATION_LIMIT = 500 def check_connection(self, logger, config) -> Tuple[bool, any]: @@ -226,7 +225,9 @@ def check_connection(self, logger, config) -> Tuple[bool, any]: logger.info("Password missing") return False, "Password missing" - endpoint = f"{self.API_URL}/v1/session" + api_url = config["glific_url"] + + endpoint = f"{api_url}/v1/session" auth_payload = {"user": {"phone": config["phone"], "password": config["password"]}} response = requests.post(endpoint, json=auth_payload, timeout=30) @@ -243,8 +244,10 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: :param config: A Mapping of the user input configuration as defined in the connector spec. 
""" + api_url = config["glific_url"] + # authenticate and get the credentials for all streams - endpoint = f"{self.API_URL}/v1/session" + endpoint = f"{api_url}/v1/session" auth_payload = {"user": {"phone": config["phone"], "password": config["password"]}} try: response = requests.post(endpoint, json=auth_payload, timeout=30) @@ -255,7 +258,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: return [] # fetch the export config for organization/client/user - endpoint = f"{self.API_URL}" + endpoint = api_url headers = {"authorization": credentials["access_token"]} try: @@ -274,7 +277,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: export_config = json.loads(data["data"]["organizationExportConfig"]["data"]) streams = [] for table in export_config["tables"]: - stream_obj = IncrementalGlificStream(table, self.API_URL, self.PAGINATION_LIMIT, credentials, config) + stream_obj = IncrementalGlificStream(table, api_url, self.PAGINATION_LIMIT, credentials, config) streams.append(stream_obj) return streams diff --git a/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml b/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml index 2e649192c6f2..7e3ef2135e65 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml +++ b/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml @@ -26,3 +26,11 @@ connectionSpecification: order: 2 default: "2023-01-26T11:11:11Z" pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$ + glific_url: + type: string + title: Glific URL + description: URL of the Glific instance + order: 3 + default: "https://api.staging.glific.com/api" + pattern: ^https://[a-zA-Z0-9.-]+$ + From 1f48a4d53b98544604f00c5bef2898c492a9d5d9 Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Tue, 14 May 2024 12:51:27 +0530 Subject: [PATCH 27/38] allow forward slash in the url pattern escaped the backslashes and the dash --- .../connectors/source-glific/source_glific/spec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml b/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml index 7e3ef2135e65..55732522f247 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml +++ b/airbyte-integrations/connectors/source-glific/source_glific/spec.yaml @@ -32,5 +32,5 @@ connectionSpecification: description: URL of the Glific instance order: 3 default: "https://api.staging.glific.com/api" - pattern: ^https://[a-zA-Z0-9.-]+$ + pattern: ^https:\/\/[a-zA-Z0-9.\-\/]+$ From 97a3ef15bf42874152a299ac686feb2926b1de79 Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Tue, 14 May 2024 13:07:51 +0530 Subject: [PATCH 28/38] state can't be {} --- .../connectors/source-glific/source_glific/source.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-integrations/connectors/source-glific/source_glific/source.py b/airbyte-integrations/connectors/source-glific/source_glific/source.py index 4e8a1f9111bb..b894a2c27b38 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/source.py +++ b/airbyte-integrations/connectors/source-glific/source_glific/source.py @@ -74,7 +74,7 @@ def __init__(self, stream_name: str, url_base: str, pagination_limit: int, crede self.start_time = config["start_time"] self.offset = 0 self.last_record = None - self.state = {} + self.state = {"updated_at": "2020-01-01T00:00:00Z"} @property def url_base(self) -> str: 
@@ -100,7 +100,7 @@ def path( def update_state(self) -> None: if self.latest_updated_date: - if not self.state or self.latest_updated_date > self.state["updated_at"]: + if self.latest_updated_date > self.state["updated_at"]: self.state = {"updated_at": self.latest_updated_date} self.latest_updated_date = None return None From ff804da4b3e9e9ae0f04b4e36418668c0558f8a9 Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Tue, 14 May 2024 14:24:24 +0530 Subject: [PATCH 29/38] final tweaks --- .../connectors/source-glific/source_glific/source.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/airbyte-integrations/connectors/source-glific/source_glific/source.py b/airbyte-integrations/connectors/source-glific/source_glific/source.py index b894a2c27b38..cd14e2efb6c8 100644 --- a/airbyte-integrations/connectors/source-glific/source_glific/source.py +++ b/airbyte-integrations/connectors/source-glific/source_glific/source.py @@ -74,7 +74,6 @@ def __init__(self, stream_name: str, url_base: str, pagination_limit: int, crede self.start_time = config["start_time"] self.offset = 0 self.last_record = None - self.state = {"updated_at": "2020-01-01T00:00:00Z"} @property def url_base(self) -> str: @@ -196,7 +195,7 @@ def state(self) -> Mapping[str, Any]: @state.setter def state(self, value: Mapping[str, Any]): - self.cursor_value = value[self.cursor_field] + self.cursor_value = value.get(self.cursor_field) self._state = value From 56f6f5271f20dcacd191755d92a12afd8d41fd86 Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Wed, 29 May 2024 09:10:58 +0530 Subject: [PATCH 30/38] locations stream --- .../connectors/source-avni/source_avni/manifest.yaml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml index 4d53b576e93d..2b58b6686df6 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml +++ b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml @@ -114,12 +114,24 @@ definitions: $parameters: path: "/approvalStatuses" + locations_stream: + $ref: "#/definitions/base_stream" + name: "locations" + primary_key: "Entity ID" + incremental_sync: + $ref: "#/definitions/incremental_base" + transformations: + $ref: "#/definitions/transformations_base" + $parameters: + path: "/locations" + streams: - "#/definitions/subjects_stream" - "#/definitions/program_enrolments_stream" - "#/definitions/program_encounters_stream" - "#/definitions/encounters_stream" - "#/definitions/approval_status_stream" + - "#/definitions/locations_stream" check: type: CheckStream From 199f77292d829744fd402a1720c3a5a7274387d9 Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Thu, 30 May 2024 15:24:17 +0530 Subject: [PATCH 31/38] added schema for locations also locations has ID not Entity ID --- .../source-avni/source_avni/manifest.yaml | 2 +- .../source_avni/schemas/locations.json | 74 +++++++++++++++++++ 2 files changed, 75 insertions(+), 1 deletion(-) create mode 100644 airbyte-integrations/connectors/source-avni/source_avni/schemas/locations.json diff --git a/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml index 2b58b6686df6..765f0fee5b05 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml +++ b/airbyte-integrations/connectors/source-avni/source_avni/manifest.yaml @@ -117,7 +117,7 @@ definitions: 
locations_stream: $ref: "#/definitions/base_stream" name: "locations" - primary_key: "Entity ID" + primary_key: "ID" incremental_sync: $ref: "#/definitions/incremental_base" transformations: diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/locations.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/locations.json new file mode 100644 index 000000000000..2abf485be798 --- /dev/null +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/locations.json @@ -0,0 +1,74 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "ID": { + "type": "string" + }, + "External ID": { + "type": [ + "null", + "string" + ] + }, + "Title": { + "type": "string" + }, + "Type": { + "type": "string" + }, + "Level": { + "type": "number" + }, + "Voided": { + "type": "boolean" + }, + "customProperties": { + "type": [ + "null", + "object" + ], + "additionalProperties": true + }, + "last_modified_at": { + "type": "string", + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "audit": { + "type": [ + "null", + "object" + ], + "additionalProperties": true, + "properties": { + "Created at": { + "type": [ + "null", + "string" + ], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Last modified at": { + "type": [ + "null", + "string" + ], + "format": "YYYY-MM-DDTHH:mm:ss.sssZ" + }, + "Created by": { + "type": [ + "null", + "string" + ] + }, + "Last modified by": { + "type": [ + "null", + "string" + ] + } + } + } + } +} \ No newline at end of file From 8211a6b21ffd323d24633e16c162f5effa9a6e1c Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Mon, 3 Jun 2024 14:31:41 +0530 Subject: [PATCH 32/38] added Parent --- .../source-avni/source_avni/schemas/locations.json | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/airbyte-integrations/connectors/source-avni/source_avni/schemas/locations.json b/airbyte-integrations/connectors/source-avni/source_avni/schemas/locations.json index 2abf485be798..354076ab700b 100644 --- a/airbyte-integrations/connectors/source-avni/source_avni/schemas/locations.json +++ b/airbyte-integrations/connectors/source-avni/source_avni/schemas/locations.json @@ -35,6 +35,13 @@ "type": "string", "format": "YYYY-MM-DDTHH:mm:ss.sssZ" }, + "Parent": { + "type": [ + "null", + "object" + ], + "additionalProperties": true + }, "audit": { "type": [ "null", From 1a67e4fa3b4d78383fea2037f913626816199af4 Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Tue, 30 Jul 2024 16:23:21 +0530 Subject: [PATCH 33/38] first working version --- .../connectors/source-mgramseva/Dockerfile | 39 + .../connectors/source-mgramseva/README.md | 105 ++ .../acceptance-test-config.yml | 37 + .../integration_tests/__init__.py | 3 + .../integration_tests/abnormal_state.json | 5 + .../integration_tests/acceptance.py | 16 + .../integration_tests/configured_catalog.json | 22 + .../integration_tests/invalid_config.json | 3 + .../integration_tests/sample_config.json | 3 + .../integration_tests/sample_state.json | 5 + .../connectors/source-mgramseva/main.py | 8 + .../connectors/source-mgramseva/metadata.yaml | 34 + .../connectors/source-mgramseva/poetry.lock | 1315 +++++++++++++++++ .../source-mgramseva/pyproject.toml | 28 + .../connectors/source-mgramseva/setup.py | 46 + .../source_mgramseva/__init__.py | 8 + .../source-mgramseva/source_mgramseva/run.py | 13 + .../source_mgramseva/schemas/TODO.md | 30 + .../schemas/mgramseva_bills.json | 12 + .../schemas/mgramseva_demands.json | 12 + 
.../schemas/mgramseva_payments.json | 12 + .../schemas/mgramseva_tenant_expenses.json | 12 + .../source_mgramseva/source.py | 281 ++++ .../source_mgramseva/spec.yaml | 41 + .../source-mgramseva/unit_tests/__init__.py | 3 + .../unit_tests/test_incremental_streams.py | 59 + .../unit_tests/test_source.py | 22 + .../unit_tests/test_streams.py | 83 ++ 28 files changed, 2257 insertions(+) create mode 100644 airbyte-integrations/connectors/source-mgramseva/Dockerfile create mode 100644 airbyte-integrations/connectors/source-mgramseva/README.md create mode 100644 airbyte-integrations/connectors/source-mgramseva/acceptance-test-config.yml create mode 100644 airbyte-integrations/connectors/source-mgramseva/integration_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-mgramseva/integration_tests/abnormal_state.json create mode 100644 airbyte-integrations/connectors/source-mgramseva/integration_tests/acceptance.py create mode 100644 airbyte-integrations/connectors/source-mgramseva/integration_tests/configured_catalog.json create mode 100644 airbyte-integrations/connectors/source-mgramseva/integration_tests/invalid_config.json create mode 100644 airbyte-integrations/connectors/source-mgramseva/integration_tests/sample_config.json create mode 100644 airbyte-integrations/connectors/source-mgramseva/integration_tests/sample_state.json create mode 100644 airbyte-integrations/connectors/source-mgramseva/main.py create mode 100644 airbyte-integrations/connectors/source-mgramseva/metadata.yaml create mode 100644 airbyte-integrations/connectors/source-mgramseva/poetry.lock create mode 100644 airbyte-integrations/connectors/source-mgramseva/pyproject.toml create mode 100644 airbyte-integrations/connectors/source-mgramseva/setup.py create mode 100644 airbyte-integrations/connectors/source-mgramseva/source_mgramseva/__init__.py create mode 100644 airbyte-integrations/connectors/source-mgramseva/source_mgramseva/run.py create mode 100644 airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/TODO.md create mode 100644 airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_bills.json create mode 100644 airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_demands.json create mode 100644 airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_payments.json create mode 100644 airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_tenant_expenses.json create mode 100644 airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py create mode 100644 airbyte-integrations/connectors/source-mgramseva/source_mgramseva/spec.yaml create mode 100644 airbyte-integrations/connectors/source-mgramseva/unit_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-mgramseva/unit_tests/test_incremental_streams.py create mode 100644 airbyte-integrations/connectors/source-mgramseva/unit_tests/test_source.py create mode 100644 airbyte-integrations/connectors/source-mgramseva/unit_tests/test_streams.py diff --git a/airbyte-integrations/connectors/source-mgramseva/Dockerfile b/airbyte-integrations/connectors/source-mgramseva/Dockerfile new file mode 100644 index 000000000000..265692d9d30f --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/Dockerfile @@ -0,0 +1,39 @@ +FROM python:3.9.15-slim-bullseye as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the 
latest version; install tzdata in the same layer, while the apt lists still exist
+RUN apt-get update \
+    && DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get install -y tzdata \
+    && rm -rf /var/lib/apt/lists/* \
+    && pip install --upgrade pip \
+    && python3 -m pip install --upgrade setuptools
+
+COPY setup.py ./
+# install necessary packages to a temporary folder
+RUN pip install --prefix=/install .
+
+# build a clean environment
+FROM base
+WORKDIR /airbyte/integration_code
+
+# copy all loaded and built libraries to a pure basic image
+COPY --from=builder /install /usr/local
+# add default timezone settings
+COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime
+RUN echo "Etc/UTC" > /etc/timezone
+
+# bash is installed for more convenient debugging.
+# RUN apk --no-cache add bash
+
+# copy payload code only
+COPY main.py ./
+COPY source_mgramseva ./source_mgramseva
+
+ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
+ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
+
+LABEL io.airbyte.version=0.1.0
+LABEL io.airbyte.name=tech4dev/source-mgramseva
diff --git a/airbyte-integrations/connectors/source-mgramseva/README.md b/airbyte-integrations/connectors/source-mgramseva/README.md
new file mode 100644
index 000000000000..dde87da0d594
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mgramseva/README.md
@@ -0,0 +1,105 @@
+# Mgramseva Source
+
+This is the repository for the Mgramseva source connector, written in Python.
+For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/mgramseva).
+
+## Local development
+
+### Prerequisites
+
+* Python (`^3.9`)
+* Poetry (`^1.7`) - installation instructions [here](https://python-poetry.org/docs/#installation)
+
+### Installing the connector
+
+From this connector directory, run:
+```bash
+poetry install --with dev
+```
+
+### Create credentials
+
+**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/mgramseva)
+to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_mgramseva/spec.yaml` file.
+Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
+See `integration_tests/sample_config.json` for a sample config file.
+
+### Locally running the connector
+
+```bash
+poetry run source-mgramseva spec
+poetry run source-mgramseva check --config secrets/config.json
+poetry run source-mgramseva discover --config secrets/config.json
+poetry run source-mgramseva read --config secrets/config.json --catalog integration_tests/configured_catalog.json
+```
+
+### Running tests
+
+To run the unit tests locally, from the connector directory run:
+
+```bash
+poetry run pytest unit_tests
+```
+
+### Building the docker image
+
+1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md)
+2. Run the following command to build the docker image:
+```bash
+airbyte-ci connectors --name=source-mgramseva build
+```
+
+An image will be available on your host with the tag `airbyte/source-mgramseva:dev`.
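+
+### Example `secrets/config.json` (hypothetical)
+
+The authoritative list of connection parameters is `source_mgramseva/spec.yaml`; the field names below are hypothetical placeholders meant only to illustrate the shape of the file mounted via `--config /secrets/config.json` in the next section, not the connector's actual spec:
+
+```json
+{
+  "base_url": "https://mgramseva.example.org",
+  "username": "REPLACE_ME",
+  "password": "REPLACE_ME"
+}
+```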
+
+
+### Running as a docker container
+
+Then run any of the connector commands as follows:
+```bash
+docker run --rm airbyte/source-mgramseva:dev spec
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-mgramseva:dev check --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-mgramseva:dev discover --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-mgramseva:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
+```
+
+### Running our CI test suite
+
+You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
+
+```bash
+airbyte-ci connectors --name=source-mgramseva test
+```
+
+### Customizing acceptance tests
+
+Customize the `acceptance-test-config.yml` file to configure acceptance tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
+If your connector needs to create or destroy resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`.
+
+### Dependency Management
+
+All of your dependencies should be managed via Poetry.
+To add a new dependency, run:
+
+```bash
+poetry add <package-name>
+```
+
+Please commit the changes to the `pyproject.toml` and `poetry.lock` files.
+
+## Publishing a new version of the connector
+
+You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what?
+1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-mgramseva test`
+2. Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)):
+   - bump the `dockerImageTag` value in `metadata.yaml`
+   - bump the `version` value in `pyproject.toml`
+3. Make sure the `metadata.yaml` content is up to date.
+4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/mgramseva.md`).
+5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
+6. Pat yourself on the back for being an awesome contributor.
+7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
+8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry.
\ No newline at end of file
diff --git a/airbyte-integrations/connectors/source-mgramseva/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mgramseva/acceptance-test-config.yml
new file mode 100644
index 000000000000..4ff9d7d67ae2
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mgramseva/acceptance-test-config.yml
@@ -0,0 +1,37 @@
+# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference)
+# for more information about how to configure these tests
+connector_image: airbyte/source-mgramseva:dev
+acceptance_tests:
+  spec:
+    tests:
+      - spec_path: "source_mgramseva/spec.yaml"
+  connection:
+    tests:
+      - config_path: "secrets/config.json"
+        status: "succeed"
+      - config_path: "integration_tests/invalid_config.json"
+        status: "failed"
+  discovery:
+    tests:
+      - config_path: "secrets/config.json"
+  basic_read:
+    tests:
+      - config_path: "secrets/config.json"
+        configured_catalog_path: "integration_tests/configured_catalog.json"
+        empty_streams: []
+# TODO uncomment this block to specify that the tests should assert that the connector outputs the records provided in the input file
+#        expect_records:
+#          path: "integration_tests/expected_records.jsonl"
+#          exact_order: no
+  incremental:
+    bypass_reason: "This connector does not implement incremental sync"
+# TODO uncomment this block if your connector implements incremental sync:
+#  tests:
+#    - config_path: "secrets/config.json"
+#      configured_catalog_path: "integration_tests/configured_catalog.json"
+#      future_state:
+#        future_state_path: "integration_tests/abnormal_state.json"
+  full_refresh:
+    tests:
+      - config_path: "secrets/config.json"
+        configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-mgramseva/integration_tests/__init__.py b/airbyte-integrations/connectors/source-mgramseva/integration_tests/__init__.py
new file mode 100644
index 000000000000..66f6de8cb2bb
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mgramseva/integration_tests/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-integrations/connectors/source-mgramseva/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-mgramseva/integration_tests/abnormal_state.json
new file mode 100644
index 000000000000..52b0f2c2118f
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mgramseva/integration_tests/abnormal_state.json
@@ -0,0 +1,5 @@
+{
+  "todo-stream-name": {
+    "todo-field-name": "todo-abnormal-value"
+  }
+}
diff --git a/airbyte-integrations/connectors/source-mgramseva/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-mgramseva/integration_tests/acceptance.py
new file mode 100644
index 000000000000..aaeb7f6c2529
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mgramseva/integration_tests/acceptance.py
@@ -0,0 +1,16 @@
+#
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+#
+
+
+import pytest
+
+pytest_plugins = ("connector_acceptance_test.plugin",)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+    """This fixture is a placeholder for external resources that acceptance tests might require."""
+    # TODO: setup test dependencies if needed.
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-mgramseva/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-mgramseva/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..36f0468db0d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/integration_tests/configured_catalog.json @@ -0,0 +1,22 @@ +{ + "streams": [ + { + "stream": { + "name": "customers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "employees", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + } + ] +} diff --git a/airbyte-integrations/connectors/source-mgramseva/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-mgramseva/integration_tests/invalid_config.json new file mode 100644 index 000000000000..f3732995784f --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "todo-wrong-field": "this should be an incomplete config file, used in standard tests" +} diff --git a/airbyte-integrations/connectors/source-mgramseva/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-mgramseva/integration_tests/sample_config.json new file mode 100644 index 000000000000..ecc4913b84c7 --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "fix-me": "TODO" +} diff --git a/airbyte-integrations/connectors/source-mgramseva/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-mgramseva/integration_tests/sample_state.json new file mode 100644 index 000000000000..3587e579822d --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-mgramseva/main.py b/airbyte-integrations/connectors/source-mgramseva/main.py new file mode 100644 index 000000000000..a113d66205d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/main.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + +from source_mgramseva.run import run + +if __name__ == "__main__": + run() diff --git a/airbyte-integrations/connectors/source-mgramseva/metadata.yaml b/airbyte-integrations/connectors/source-mgramseva/metadata.yaml new file mode 100644 index 000000000000..e912c479d181 --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/metadata.yaml @@ -0,0 +1,34 @@ +data: + allowedHosts: + hosts: + - TODO # Please change to the hostname of the source. + registries: + oss: + enabled: true + cloud: + enabled: false + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-mgramseva + connectorBuildOptions: + # Please update to the latest version of the connector base image. + # https://hub.docker.com/r/airbyte/python-connector-base + # Please use the full address with sha256 hash to guarantee build reproducibility. 
+ baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 + connectorSubtype: api + connectorType: source + definitionId: 96dd2030-cf66-4957-bf53-d271f8dcdfdb + dockerImageTag: 0.1.0 + dockerRepository: airbyte/source-mgramseva + githubIssueLabel: source-mgramseva + icon: mgramseva.svg + license: MIT + name: Mgramseva + releaseDate: TODO + supportLevel: community + releaseStage: alpha + documentationUrl: https://docs.airbyte.com/integrations/sources/mgramseva + tags: + - language:python +metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-mgramseva/poetry.lock b/airbyte-integrations/connectors/source-mgramseva/poetry.lock new file mode 100644 index 000000000000..7afdb8137c04 --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/poetry.lock @@ -0,0 +1,1315 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.90.0" +description = "A framework for writing Airbyte Connectors." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "airbyte_cdk-0.90.0-py3-none-any.whl", hash = "sha256:bd0aa5843cdc4901f2e482f0e86695ca4e6db83b65c5017799255dd20535cf56"}, + {file = "airbyte_cdk-0.90.0.tar.gz", hash = "sha256:25cefc010718bada5cce3f87e7ae93068630732c0d34ce5145f8ddf7457d4d3c"}, +] + +[package.dependencies] +airbyte-protocol-models = ">=0.9.0,<1.0" +backoff = "*" +cachetools = "*" +cryptography = ">=42.0.5,<43.0.0" +Deprecated = ">=1.2,<1.3" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<0.3" +jsonschema = ">=3.2.0,<3.3.0" +langchain_core = "0.1.42" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyjwt = ">=2.8.0,<3.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +pytz = "2024.1" +PyYAML = ">=6.0.1,<7.0.0" +requests = "*" +requests_cache = "*" +wcmatch = "8.4" + +[package.extras] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.12.2" +description = "Declares the Airbyte Protocol." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.12.2-py3-none-any.whl", hash = "sha256:1780db5b26285865b858d26502933def8e11919c9436ccf7b8b9cb0170b07c2a"}, + {file = "airbyte_protocol_models-0.12.2.tar.gz", hash = "sha256:b7c4d9a7c32c0691601c2b9416af090a858e126666e2c8c880d7a1798eb519f0"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.4.0" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, + {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.7.4" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = 
"cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = 
"cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + 
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "cryptography" +version = "42.0.8" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, + {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, + {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, + {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, + {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, + {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = 
"sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, + {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "3.0.0" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, +] + +[[package]] +name = "jsonref" +version = "0.2" +description = "An implementation of JSON Reference for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, + {file = "jsonref-0.2.tar.gz", hash = "sha256:f3c45b121cf6257eafabdc3a8008763aed1cd7da06dbabc59a9e4d2a5e4e6697"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "langchain-core" +version = "0.1.42" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_core-0.1.42-py3-none-any.whl", hash = "sha256:c5653ffa08a44f740295c157a24c0def4a753333f6a2c41f76bf431cd00be8b5"}, + {file = "langchain_core-0.1.42.tar.gz", hash = "sha256:40751bf60ea5d8e2b2efe65290db434717ee3834870c002e40e2811f09d814e6"}, +] + +[package.dependencies] +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.1.0,<0.2.0" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[[package]] +name = "langsmith" +version = "0.1.93" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
+optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langsmith-0.1.93-py3-none-any.whl", hash = "sha256:811210b9d5f108f36431bd7b997eb9476a9ecf5a2abd7ddbb606c1cdcf0f43ce"}, + {file = "langsmith-0.1.93.tar.gz", hash = "sha256:285b6ad3a54f50fa8eb97b5f600acc57d0e37e139dd8cf2111a117d0435ba9b4"}, +] + +[package.dependencies] +orjson = ">=3.9.14,<4.0.0" +pydantic = {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""} +requests = ">=2,<3" + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "orjson" +version = "3.10.6" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.6-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:fb0ee33124db6eaa517d00890fc1a55c3bfe1cf78ba4a8899d71a06f2d6ff5c7"}, + {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c1c4b53b24a4c06547ce43e5fee6ec4e0d8fe2d597f4647fc033fd205707365"}, + {file = 
"orjson-3.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eadc8fd310edb4bdbd333374f2c8fec6794bbbae99b592f448d8214a5e4050c0"}, + {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61272a5aec2b2661f4fa2b37c907ce9701e821b2c1285d5c3ab0207ebd358d38"}, + {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57985ee7e91d6214c837936dc1608f40f330a6b88bb13f5a57ce5257807da143"}, + {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:633a3b31d9d7c9f02d49c4ab4d0a86065c4a6f6adc297d63d272e043472acab5"}, + {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1c680b269d33ec444afe2bdc647c9eb73166fa47a16d9a75ee56a374f4a45f43"}, + {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f759503a97a6ace19e55461395ab0d618b5a117e8d0fbb20e70cfd68a47327f2"}, + {file = "orjson-3.10.6-cp310-none-win32.whl", hash = "sha256:95a0cce17f969fb5391762e5719575217bd10ac5a189d1979442ee54456393f3"}, + {file = "orjson-3.10.6-cp310-none-win_amd64.whl", hash = "sha256:df25d9271270ba2133cc88ee83c318372bdc0f2cd6f32e7a450809a111efc45c"}, + {file = "orjson-3.10.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b1ec490e10d2a77c345def52599311849fc063ae0e67cf4f84528073152bb2ba"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d43d3feb8f19d07e9f01e5b9be4f28801cf7c60d0fa0d279951b18fae1932b"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3045267e98fe749408eee1593a142e02357c5c99be0802185ef2170086a863"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27bc6a28ae95923350ab382c57113abd38f3928af3c80be6f2ba7eb8d8db0b0"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d27456491ca79532d11e507cadca37fb8c9324a3976294f68fb1eff2dc6ced5a"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05ac3d3916023745aa3b3b388e91b9166be1ca02b7c7e41045da6d12985685f0"}, + {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1335d4ef59ab85cab66fe73fd7a4e881c298ee7f63ede918b7faa1b27cbe5212"}, + {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4bbc6d0af24c1575edc79994c20e1b29e6fb3c6a570371306db0993ecf144dc5"}, + {file = "orjson-3.10.6-cp311-none-win32.whl", hash = "sha256:450e39ab1f7694465060a0550b3f6d328d20297bf2e06aa947b97c21e5241fbd"}, + {file = "orjson-3.10.6-cp311-none-win_amd64.whl", hash = "sha256:227df19441372610b20e05bdb906e1742ec2ad7a66ac8350dcfd29a63014a83b"}, + {file = "orjson-3.10.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ea2977b21f8d5d9b758bb3f344a75e55ca78e3ff85595d248eee813ae23ecdfb"}, + {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6f3d167d13a16ed263b52dbfedff52c962bfd3d270b46b7518365bcc2121eed"}, + {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f710f346e4c44a4e8bdf23daa974faede58f83334289df80bc9cd12fe82573c7"}, + {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7275664f84e027dcb1ad5200b8b18373e9c669b2a9ec33d410c40f5ccf4b257e"}, + {file = 
"orjson-3.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0943e4c701196b23c240b3d10ed8ecd674f03089198cf503105b474a4f77f21f"}, + {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:446dee5a491b5bc7d8f825d80d9637e7af43f86a331207b9c9610e2f93fee22a"}, + {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:64c81456d2a050d380786413786b057983892db105516639cb5d3ee3c7fd5148"}, + {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:960db0e31c4e52fa0fc3ecbaea5b2d3b58f379e32a95ae6b0ebeaa25b93dfd34"}, + {file = "orjson-3.10.6-cp312-none-win32.whl", hash = "sha256:a6ea7afb5b30b2317e0bee03c8d34c8181bc5a36f2afd4d0952f378972c4efd5"}, + {file = "orjson-3.10.6-cp312-none-win_amd64.whl", hash = "sha256:874ce88264b7e655dde4aeaacdc8fd772a7962faadfb41abe63e2a4861abc3dc"}, + {file = "orjson-3.10.6-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:66680eae4c4e7fc193d91cfc1353ad6d01b4801ae9b5314f17e11ba55e934183"}, + {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caff75b425db5ef8e8f23af93c80f072f97b4fb3afd4af44482905c9f588da28"}, + {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3722fddb821b6036fd2a3c814f6bd9b57a89dc6337b9924ecd614ebce3271394"}, + {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2c116072a8533f2fec435fde4d134610f806bdac20188c7bd2081f3e9e0133f"}, + {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6eeb13218c8cf34c61912e9df2de2853f1d009de0e46ea09ccdf3d757896af0a"}, + {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965a916373382674e323c957d560b953d81d7a8603fbeee26f7b8248638bd48b"}, + {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03c95484d53ed8e479cade8628c9cea00fd9d67f5554764a1110e0d5aa2de96e"}, + {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e060748a04cccf1e0a6f2358dffea9c080b849a4a68c28b1b907f272b5127e9b"}, + {file = "orjson-3.10.6-cp38-none-win32.whl", hash = "sha256:738dbe3ef909c4b019d69afc19caf6b5ed0e2f1c786b5d6215fbb7539246e4c6"}, + {file = "orjson-3.10.6-cp38-none-win_amd64.whl", hash = "sha256:d40f839dddf6a7d77114fe6b8a70218556408c71d4d6e29413bb5f150a692ff7"}, + {file = "orjson-3.10.6-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:697a35a083c4f834807a6232b3e62c8b280f7a44ad0b759fd4dce748951e70db"}, + {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd502f96bf5ea9a61cbc0b2b5900d0dd68aa0da197179042bdd2be67e51a1e4b"}, + {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f215789fb1667cdc874c1b8af6a84dc939fd802bf293a8334fce185c79cd359b"}, + {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2debd8ddce948a8c0938c8c93ade191d2f4ba4649a54302a7da905a81f00b56"}, + {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5410111d7b6681d4b0d65e0f58a13be588d01b473822483f77f513c7f93bd3b2"}, + {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb1f28a137337fdc18384079fa5726810681055b32b92253fa15ae5656e1dddb"}, + {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", 
hash = "sha256:bf2fbbce5fe7cd1aa177ea3eab2b8e6a6bc6e8592e4279ed3db2d62e57c0e1b2"}, + {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:79b9b9e33bd4c517445a62b90ca0cc279b0f1f3970655c3df9e608bc3f91741a"}, + {file = "orjson-3.10.6-cp39-none-win32.whl", hash = "sha256:30b0a09a2014e621b1adf66a4f705f0809358350a757508ee80209b2d8dae219"}, + {file = "orjson-3.10.6-cp39-none-win_amd64.whl", hash = "sha256:49e3bc615652617d463069f91b867a4458114c5b104e13b7ae6872e5f79d0844"}, + {file = "orjson-3.10.6.tar.gz", hash = "sha256:e54b63d0a7c6c54a5f5f726bc93a2078111ef060fec4ecbf34c5db800ca3b3a7"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", 
hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "1.10.17" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b"}, + {file = "pydantic-1.10.17-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a"}, + {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e"}, + {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7"}, + {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6"}, + {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681"}, + {file = 
"pydantic-1.10.17-cp310-cp310-win_amd64.whl", hash = "sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3"}, + {file = "pydantic-1.10.17-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a"}, + {file = "pydantic-1.10.17-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b"}, + {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63"}, + {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741"}, + {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c"}, + {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d"}, + {file = "pydantic-1.10.17-cp311-cp311-win_amd64.whl", hash = "sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b"}, + {file = "pydantic-1.10.17-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb"}, + {file = "pydantic-1.10.17-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815"}, + {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab"}, + {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc"}, + {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f"}, + {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f"}, + {file = "pydantic-1.10.17-cp312-cp312-win_amd64.whl", hash = "sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad"}, + {file = "pydantic-1.10.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655"}, + {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b"}, + {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7"}, + {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3"}, + {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076"}, + {file = "pydantic-1.10.17-cp37-cp37m-win_amd64.whl", hash = "sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f"}, + {file = "pydantic-1.10.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33"}, + {file = "pydantic-1.10.17-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e"}, + {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768"}, + {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7"}, + {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7"}, + {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75"}, + {file = "pydantic-1.10.17-cp38-cp38-win_amd64.whl", hash = "sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0"}, + {file = "pydantic-1.10.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54"}, + {file = "pydantic-1.10.17-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773"}, + {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe"}, + {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab"}, + {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d"}, + {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd"}, + {file = "pydantic-1.10.17-cp39-cp39-win_amd64.whl", hash = "sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227"}, + {file = "pydantic-1.10.17-py3-none-any.whl", hash = "sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688"}, + {file = "pydantic-1.10.17.tar.gz", hash = "sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyjwt" +version = "2.8.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = 
"sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "8.3.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, + {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + 
+[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.1" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.1-py3-none-any.whl", hash = "sha256:1285151cddf5331067baa82598afe2d47c7495a1334bfe7a7d329b43e9fd3603"}, + {file = "requests_cache-1.2.1.tar.gz", hash = "sha256:68abc986fdc5b8d0911318fbb5f7c80eebcd4d01bfacc6685ecf8876052511d1"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.12.1" +description = "Mock out responses from the requests package" +optional = false +python-versions = ">=3.5" +files = [ + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, +] + +[package.dependencies] +requests = ">=2.22,<3" + +[package.extras] +fixture = ["fixtures"] + +[[package]] +name = "setuptools" +version = "71.1.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-71.1.0-py3-none-any.whl", hash = "sha256:33874fdc59b3188304b2e7c80d9029097ea31627180896fb549c578ceb8a0855"}, + {file = "setuptools-71.1.0.tar.gz", hash = "sha256:032d42ee9fb536e33087fb66cac5f840eb9391ed05637b3f2a76a7c8fb477936"}, +] + +[package.extras] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs 
(>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "tenacity" +version = "8.5.0" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, + {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = 
"wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = 
"wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "acd5908c82765b55ec5859799db1bcbb616d044db689a3ba94346d8b1d2f9b5c" diff --git a/airbyte-integrations/connectors/source-mgramseva/pyproject.toml b/airbyte-integrations/connectors/source-mgramseva/pyproject.toml new file mode 100644 index 000000000000..f1c7c476df7d --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/pyproject.toml @@ -0,0 +1,28 @@ +[build-system] +requires = [ "poetry-core>=1.0.0",] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +version = "0.1.0" +name = "source-mgramseva" +description = "Source implementation for mgramseva." +authors = [ "Airbyte ",] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/mgramseva" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +packages = [ { include = "source_mgramseva" }, {include = "main.py" } ] + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "^0" + +[tool.poetry.scripts] +source-mgramseva = "source_mgramseva.run:run" + +[tool.poetry.group.dev.dependencies] +requests-mock = "*" +pytest-mock = "*" +pytest = "*" + diff --git a/airbyte-integrations/connectors/source-mgramseva/setup.py b/airbyte-integrations/connectors/source-mgramseva/setup.py new file mode 100644 index 000000000000..d52b5f6ec3f6 --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/setup.py @@ -0,0 +1,46 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk", +] + +TEST_REQUIREMENTS = [ + "requests-mock~=1.9.3", + "pytest~=6.1", + "pytest-mock~=3.6.1", +] + +setup( + entry_points={ + "console_scripts": [ + "source-mgramseva=source_mgramseva.run:run", + ], + }, + name="source_mgramseva", + description="Source implementation for mGramSeva.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={ + "": [ + # Include yaml files in the package (if any) + "*.yml", + "*.yaml", + # Include all json files in the package, up to 4 levels deep + "*.json", + "*/*.json", + "*/*/*.json", + "*/*/*/*.json", + "*/*/*/*/*.json", + ] + }, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/__init__.py b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/__init__.py new file mode 100644 index 000000000000..a130859c3fcd --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceMgramseva + +__all__ = ["SourceMgramseva"] diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/run.py b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/run.py new file mode 100644 index 000000000000..206c2ad08dc1 --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/run.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from .source import SourceMgramseva + +def run(): + source = SourceMgramseva() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/TODO.md b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/TODO.md new file mode 100644 index 000000000000..0037aeb60d89 --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/TODO.md @@ -0,0 +1,30 @@ +# TODO: Define your stream schemas + +Your connector must describe the schema of each stream it can output using [JSONSchema](https://json-schema.org). + +The simplest way to do this is to describe the schema of your streams using one `.json` file per stream. You can also dynamically generate the schema of your stream in code, or you can combine both approaches: start with a `.json` file and dynamically add properties to it. + +The schema of a stream is the return value of `Stream.get_json_schema`. + +## Static schemas + +By default, `Stream.get_json_schema` reads a `.json` file in the `schemas/` directory whose name is equal to the value of the `Stream.name` property. In turn `Stream.name` by default returns the name of the class in snake case. Therefore, if you have a class `class EmployeeBenefits(HttpStream)` the default behavior will look for a file called `schemas/employee_benefits.json`. You can override any of these behaviors as you need. + +Important note: any objects referenced via `$ref` should be placed in the `shared/` directory in their own `.json` files. + +## Dynamic schemas + +If you'd rather define your schema in code, override `Stream.get_json_schema` in your stream class to return a `dict` describing the schema using [JSONSchema](https://json-schema.org). + +## Dynamically modifying static schemas + +Override `Stream.get_json_schema` to run the default behavior, edit the returned value, then return the edited value: + +``` +def get_json_schema(self): + schema = super().get_json_schema() + schema['dynamically_determined_property'] = "property" + return schema +``` + +Delete this file once you're done. Or don't. 
Up to you :) diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_bills.json b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_bills.json new file mode 100644 index 000000000000..a7af717984f2 --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_bills.json @@ -0,0 +1,12 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "data": { + "type": "object" + } + } +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_demands.json b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_demands.json new file mode 100644 index 000000000000..a7af717984f2 --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_demands.json @@ -0,0 +1,12 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "data": { + "type": "object" + } + } +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_payments.json b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_payments.json new file mode 100644 index 000000000000..a7af717984f2 --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_payments.json @@ -0,0 +1,12 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "data": { + "type": "object" + } + } +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_tenant_expenses.json b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_tenant_expenses.json new file mode 100644 index 000000000000..a7af717984f2 --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/schemas/mgramseva_tenant_expenses.json @@ -0,0 +1,12 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "data": { + "type": "object" + } + } +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py new file mode 100644 index 000000000000..b602cd98f6f8 --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py @@ -0,0 +1,281 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+#
+
+
+from abc import ABC
+from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple
+
+import base64
+from datetime import datetime
+from logging import Logger
+import requests
+import pytz
+from airbyte_cdk.models import SyncMode
+from airbyte_cdk.sources import AbstractSource
+from airbyte_cdk.sources.streams import Stream
+from airbyte_cdk.sources.streams.http import HttpStream
+
+
+# Basic full refresh stream
+class MgramsevaStream(HttpStream, ABC):
+    """Base stream for all mGramSeva objects"""
+
+    url_base = "https://www.peyjalbihar.org/"
+
+    http_method = "POST"
+
+    primary_key = "id"
+
+    def __init__(self, endpoint: str, headers: dict, request_info: dict, user_request: dict, params: dict, response_key: str, **kwargs):
+        """set base url, headers, request info and user request"""
+        super().__init__(**kwargs)
+        self.endpoint = endpoint
+        self.headers = headers
+        self.request_info = request_info
+        self.user_request = user_request
+        self.params = params
+        self.response_key = response_key
+
+    def path(
+        self,
+        stream_state: Mapping[str, Any] = None,  # pylint: disable=unused-argument
+        stream_slice: Mapping[str, Any] = None,  # pylint: disable=unused-argument
+        next_page_token: Mapping[str, Any] = None,  # pylint: disable=unused-argument
+    ) -> str:
+        """path"""
+        return self.endpoint
+
+    def request_headers(
+        self,
+        stream_state: Optional[Mapping[str, Any]],  # pylint: disable=unused-argument
+        stream_slice: Optional[Mapping[str, Any]] = None,  # pylint: disable=unused-argument
+        next_page_token: Optional[Mapping[str, Any]] = None,  # pylint: disable=unused-argument
+    ) -> Mapping[str, Any]:
+        """Return headers required for the request"""
+        return self.headers
+
+    def request_body_json(
+        self,
+        stream_state: Optional[Mapping[str, Any]],  # pylint: disable=unused-argument
+        stream_slice: Optional[Mapping[str, Any]] = None,  # pylint: disable=unused-argument
+        next_page_token: Optional[Mapping[str, Any]] = None,  # pylint: disable=unused-argument
+    ) -> Optional[Mapping[str, Any]]:
+        """
+        All requests require the same body
+        """
+        return {"RequestInfo": self.request_info, "userInfo": self.user_request}
+
+    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
+        """
+        TODO: Override this method to define a pagination strategy. If you will not be using pagination, no action is required - just return None.
+
+        This method should return a Mapping (e.g. dict) containing whatever information is required to make paginated requests. This dict is passed
+        to most other methods in this class to help you form headers, request bodies, query params, etc.
+
+        For example, if the API accepts a 'page' parameter to determine which page of the result to return, and a response from the API contains a
+        'page' number, then this method should probably return a dict {'page': response.json()['page'] + 1} to increment the page count by 1.
+        The request_params method should then read the input next_page_token and set the 'page' param to next_page_token['page'].
+
+        :param response: the most recent response from the API
+        :return If there is another page in the result, a mapping (e.g. dict) containing information needed to query the next page in the response.
+        If there are no more pages in the result, return None.
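+
+        The mGramSeva endpoints used by this connector are not paginated, so this
+        override simply returns None. Purely as an illustrative sketch (hypothetical,
+        not this API's behavior - the "page" and "hasMore" field names are invented
+        here), a page-numbered API could be handled like:
+
+            body = response.json()
+            return {"page": body["page"] + 1} if body.get("hasMore") else None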
+ """ + return None + + def request_params( + self, + stream_state: Mapping[str, Any], # pylint: disable=unused-argument + stream_slice: Mapping[str, any] = None, # pylint: disable=unused-argument + next_page_token: Mapping[str, Any] = None, # pylint: disable=unused-argument + ) -> MutableMapping[str, Any]: + """request parameters""" + return self.params + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + """ + :return an iterable containing each record in the response + """ + return map(lambda x: {"data": x, "id": x["id"]}, response.json()[self.response_key]) + + # def base_schema(self) -> dict: + # """Base schema for all streams""" + # return { + # "$schema": "http://json-schema.org/draft-07/schema#", + # "type": "object", + # "properties": {"id": {"type": "string"}, "data": {"type": "object"}}, + # } + + +class MgramsevaDemands(MgramsevaStream): + """object for consumer demands""" + + def __init__(self, headers: dict, request_info: dict, user_request: dict, start_date: datetime, end_date: datetime, **kwargs): + """specify endpoint for demands and call super""" + params = { + "tenantId": "br.testing", + "businessService": "WS", + "periodFrom": int(1000 * start_date.timestamp()), + "periodTo": int(1000 * end_date.timestamp()), + } + super().__init__("billing-service/demand/_search", headers, request_info, user_request, params, "Demands", **kwargs) + + +class MgramsevaBills(MgramsevaStream): + """object for consumer bills""" + + @property + def name(self) -> str: + return f"Bill_{self.consumer_code.replace('/', '_')}" + + def get_json_schema(self) -> Mapping[str, Any]: + """override""" + return { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": {"id": {"type": "string"}, "data": {"type": "object"}}, + } + + def __init__(self, headers: dict, request_info: dict, user_request: dict, consumer_code: str, **kwargs): + """specify endpoint for bills and call super""" + self.consumer_code = consumer_code + params = { + "tenantId": "br.testing", + "consumerCode": consumer_code, + "businessService": "WS", + } + super().__init__("billing-service/bill/v2/_fetchbill", headers, request_info, user_request, params, "Bill", **kwargs) + + +class MgramsevaTenantExpenses(MgramsevaStream): + """object for tenant payments""" + + def __init__(self, headers: dict, request_info: dict, user_request: dict, **kwargs): + """ + specify endpoint for demands and call super + 1672531200000 = 2023-01-01 00:00 + 1830297600000 = 2028-01-01 00:00 + """ + params = {"tenantId": "br.testing", "fromDate": 1672531200000, "toDate": 1830297600000} + super().__init__( + "echallan-services/eChallan/v1/_expenseDashboard", headers, request_info, user_request, params, "ExpenseDashboard", **kwargs + ) + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + """ + :this response has only one object, so return it + """ + return [{"data": response.json()[self.response_key], "id": "1"}] + + +class MgramsevaPayments(MgramsevaStream): + """object for consumer payments""" + + def __init__(self, headers: dict, request_info: dict, user_request: dict, **kwargs): + """specify endpoint for payments and call super""" + params = {"tenantId": "br.testing", "businessService": "WS"} + super().__init__("collection-services/payments/WS/_search", headers, request_info, user_request, params, "Payments", **kwargs) + + +# Source +class SourceMgramseva(AbstractSource): + """Source for mGramSeva""" + + def __init__(self): + """constructor""" + 
+        self.headers = {}
+        self.request_info = {}
+        self.user_request = {}
+        self.base_url = None
+        self.config = {}
+        self.setup_complete = False
+
+    def setup(self, config: dict) -> None:
+        """
+        config contains
+        - base_url
+        - client_user
+        - client_password
+        - username
+        - password
+        """
+        if self.setup_complete:
+            return
+        if config["client_password"] == "no-pass":
+            config["client_password"] = ""
+        client_user_password = f'{config["client_user"]}:{config["client_password"]}'
+        apikey = base64.encodebytes(client_user_password.encode("ascii")).decode("utf-8").strip()
+        self.headers = {"Authorization": "Basic " + apikey}
+
+        base_url = config["base_url"]
+        if base_url[-1] != "/":
+            base_url += "/"
+        self.base_url = base_url
+
+        self.config = config
+        self.setup_complete = True
+
+    def get_auth_token(self) -> None:
+        """performs the auth step to get the access token and the user info"""
+
+        response = requests.post(
+            self.base_url + "user/oauth/token",
+            params={
+                "username": self.config["username"],
+                "password": self.config["password"],
+                "scope": "read",
+                "grant_type": "password",
+                "tenantId": "br",
+                "userType": "EMPLOYEE",
+            },
+            headers=self.headers,
+            timeout=15,
+        )
+
+        response.raise_for_status()
+
+        auth_response = response.json()
+        self.user_request = auth_response["UserRequest"]
+        self.request_info = {
+            "action": "_search",
+            "apiId": "mgramseva",
+            "authToken": auth_response["access_token"],
+            "userInfo": self.user_request,
+        }
+
+    def check_connection(self, logger: Logger, config) -> Tuple[bool, Any]:
+        """attempt to connect to the API with the provided credentials"""
+        try:
+            self.setup(config)
+            self.get_auth_token()
+        except requests.HTTPError as e:
+            logger.exception(e)
+            return False, str(e)
+        return True, None
+
+    def streams(self, config: Mapping[str, Any]) -> List[Stream]:
+        """return all the streams we have to sync"""
+
+        self.setup(config)
+        self.get_auth_token()
+
+        # Generate streams for each object type
+        streams = [
+            MgramsevaPayments(self.headers, self.request_info, self.user_request),
+            MgramsevaTenantExpenses(self.headers, self.request_info, self.user_request),
+        ]
+
+        start_date = datetime.strptime(config.get("start_date", "2022-01-01"), "%Y-%m-%d").replace(tzinfo=pytz.UTC)
+        end_date = datetime.today().replace(tzinfo=pytz.UTC)
+
+        demand_stream = MgramsevaDemands(self.headers, self.request_info, self.user_request, start_date, end_date)
+        streams.append(demand_stream)
+
+        # and now we need bills for each consumer
+        consumer_codes = set()
+        for demand in demand_stream.read_records(SyncMode.full_refresh):
+            if demand["data"]["consumerCode"] not in consumer_codes:
+                consumer_codes.add(demand["data"]["consumerCode"])
+                streams.append(MgramsevaBills(self.headers, self.request_info, self.user_request, demand["data"]["consumerCode"]))
+
+        return streams
diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/spec.yaml b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/spec.yaml
new file mode 100644
index 000000000000..02e89bcf27a9
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/spec.yaml
@@ -0,0 +1,41 @@
+documentationUrl: https://docsurl.com
+connectionSpecification:
+  $schema: http://json-schema.org/draft-07/schema#
+  title: Mgramseva Spec
+  type: object
+  required:
+    - client_user
+    - base_url
+    - username
+    - password
+  properties:
+    client_user:
+      type: string
+      title: user for Basic Auth
+      order: 0
+    client_password:
+      type: string
+      title: password for Basic Auth
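+      # optional: omit, leave blank, or set to "no-pass" when the account has no Basic Auth password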
airbyte_secret: true + default: "" + order: 1 + base_url: + type: string + title: Base URL + order: 2 + username: + type: string + title: username for API + order: 3 + password: + type: string + title: password for API + airbyte_secret: true + order: 4 + start_date: + type: string + title: Start date for extracting records + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ + default: "2022-01-01" + description: Date from which to look for consumer demands + order: 5 \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-mgramseva/unit_tests/__init__.py b/airbyte-integrations/connectors/source-mgramseva/unit_tests/__init__.py new file mode 100644 index 000000000000..66f6de8cb2bb --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-mgramseva/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-mgramseva/unit_tests/test_incremental_streams.py new file mode 100644 index 000000000000..022916b81ffe --- /dev/null +++ b/airbyte-integrations/connectors/source-mgramseva/unit_tests/test_incremental_streams.py @@ -0,0 +1,59 @@ +# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +# + + +from airbyte_cdk.models import SyncMode +from pytest import fixture +from source_mgramseva.source import IncrementalMgramsevaStream + + +@fixture +def patch_incremental_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(IncrementalMgramsevaStream, "path", "v0/example_endpoint") + mocker.patch.object(IncrementalMgramsevaStream, "primary_key", "test_primary_key") + mocker.patch.object(IncrementalMgramsevaStream, "__abstractmethods__", set()) + + +def test_cursor_field(patch_incremental_base_class): + stream = IncrementalMgramsevaStream() + # TODO: replace this with your expected cursor field + expected_cursor_field = [] + assert stream.cursor_field == expected_cursor_field + + +def test_get_updated_state(patch_incremental_base_class): + stream = IncrementalMgramsevaStream() + # TODO: replace this with your input parameters + inputs = {"current_stream_state": None, "latest_record": None} + # TODO: replace this with your expected updated stream state + expected_state = {} + assert stream.get_updated_state(**inputs) == expected_state + + +def test_stream_slices(patch_incremental_base_class): + stream = IncrementalMgramsevaStream() + # TODO: replace this with your input parameters + inputs = {"sync_mode": SyncMode.incremental, "cursor_field": [], "stream_state": {}} + # TODO: replace this with your expected stream slices list + expected_stream_slice = [None] + assert stream.stream_slices(**inputs) == expected_stream_slice + + +def test_supports_incremental(patch_incremental_base_class, mocker): + mocker.patch.object(IncrementalMgramsevaStream, "cursor_field", "dummy_field") + stream = IncrementalMgramsevaStream() + assert stream.supports_incremental + + +def test_source_defined_cursor(patch_incremental_base_class): + stream = IncrementalMgramsevaStream() + assert stream.source_defined_cursor + + +def test_stream_checkpoint_interval(patch_incremental_base_class): + stream = IncrementalMgramsevaStream() + # TODO: replace this with your expected checkpoint interval + expected_checkpoint_interval = None + assert stream.state_checkpoint_interval == expected_checkpoint_interval diff --git 
a/airbyte-integrations/connectors/source-mgramseva/unit_tests/test_source.py b/airbyte-integrations/connectors/source-mgramseva/unit_tests/test_source.py
new file mode 100644
index 000000000000..e8a663af0da7
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mgramseva/unit_tests/test_source.py
@@ -0,0 +1,22 @@
+#
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+#
+
+from unittest.mock import MagicMock
+
+from source_mgramseva.source import SourceMgramseva
+
+
+def test_check_connection(mocker):
+    source = SourceMgramseva()
+    logger_mock, config_mock = MagicMock(), MagicMock()
+    assert source.check_connection(logger_mock, config_mock) == (True, None)
+
+
+def test_streams(mocker):
+    source = SourceMgramseva()
+    config_mock = MagicMock()
+    streams = source.streams(config_mock)
+    # TODO: replace this with your streams number
+    expected_streams_number = 2
+    assert len(streams) == expected_streams_number
diff --git a/airbyte-integrations/connectors/source-mgramseva/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-mgramseva/unit_tests/test_streams.py
new file mode 100644
index 000000000000..e1936aebe600
--- /dev/null
+++ b/airbyte-integrations/connectors/source-mgramseva/unit_tests/test_streams.py
@@ -0,0 +1,83 @@
+#
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+#
+
+from http import HTTPStatus
+from unittest.mock import MagicMock
+
+import pytest
+from source_mgramseva.source import MgramsevaStream
+
+
+@pytest.fixture
+def patch_base_class(mocker):
+    # Mock abstract methods to enable instantiating abstract class
+    mocker.patch.object(MgramsevaStream, "path", "v0/example_endpoint")
+    mocker.patch.object(MgramsevaStream, "primary_key", "test_primary_key")
+    mocker.patch.object(MgramsevaStream, "__abstractmethods__", set())
+
+
+def test_request_params(patch_base_class):
+    stream = MgramsevaStream()
+    # TODO: replace this with your input parameters
+    inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None}
+    # TODO: replace this with your expected request parameters
+    expected_params = {}
+    assert stream.request_params(**inputs) == expected_params
+
+
+def test_next_page_token(patch_base_class):
+    stream = MgramsevaStream()
+    # TODO: replace this with your input parameters
+    inputs = {"response": MagicMock()}
+    # TODO: replace this with your expected next page token
+    expected_token = None
+    assert stream.next_page_token(**inputs) == expected_token
+
+
+def test_parse_response(patch_base_class):
+    stream = MgramsevaStream()
+    # TODO: replace this with your input parameters
+    inputs = {"response": MagicMock()}
+    # TODO: replace this with your expected parsed object
+    expected_parsed_object = {}
+    assert next(stream.parse_response(**inputs)) == expected_parsed_object
+
+
+def test_request_headers(patch_base_class):
+    stream = MgramsevaStream()
+    # TODO: replace this with your input parameters
+    inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None}
+    # TODO: replace this with your expected request headers
+    expected_headers = {}
+    assert stream.request_headers(**inputs) == expected_headers
+
+
+def test_http_method(patch_base_class):
+    stream = MgramsevaStream()
+    # MgramsevaStream issues POST requests against the mGramSeva API
+    expected_method = "POST"
+    assert stream.http_method == expected_method
+
+
+@pytest.mark.parametrize(
+    ("http_status", "should_retry"),
+    [
+        (HTTPStatus.OK, False),
+        (HTTPStatus.BAD_REQUEST, False),
+        (HTTPStatus.TOO_MANY_REQUESTS, True),
+        (HTTPStatus.INTERNAL_SERVER_ERROR,
True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = MgramsevaStream() + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = MgramsevaStream() + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time From 30de055dba8aade8dd99e1f14b24d380bf76b9f6 Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Wed, 31 Jul 2024 15:14:00 +0530 Subject: [PATCH 34/38] client_password is optional so we should handle the case where it isn't specified --- .../connectors/source-mgramseva/source_mgramseva/source.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py index b602cd98f6f8..83c4ef1df0f0 100644 --- a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py +++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py @@ -201,6 +201,8 @@ def setup(self, config: dict) -> None: """ if self.setup_complete: return + if "client_password" not in config or config["client_password"] is None: + config["client_password"] = "" if config["client_password"] == "no-pass": config["client_password"] = "" client_user_password = f'{config["client_user"]}:{config["client_password"]}' From 925c8ff070d3297b577e25bb80aecc72242ab429 Mon Sep 17 00:00:00 2001 From: Rohit Chatterjee Date: Tue, 6 Aug 2024 13:21:43 +0530 Subject: [PATCH 35/38] put the bills into a single table --- .../source_mgramseva/source.py | 54 +++++++++---------- 1 file changed, 26 insertions(+), 28 deletions(-) diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py index 83c4ef1df0f0..fdcda5da9655 100644 --- a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py +++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py @@ -15,6 +15,7 @@ from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.sources.streams.core import StreamData # Basic full refresh stream @@ -98,14 +99,6 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp """ return map(lambda x: {"data": x, "id": x["id"]}, response.json()[self.response_key]) - # def base_schema(self) -> dict: - # """Base schema for all streams""" - # return { - # "$schema": "http://json-schema.org/draft-07/schema#", - # "type": "object", - # "properties": {"id": {"type": "string"}, "data": {"type": "object"}}, - # } - class MgramsevaDemands(MgramsevaStream): """object for consumer demands""" @@ -124,27 +117,32 @@ def __init__(self, headers: dict, request_info: dict, user_request: dict, start_ class MgramsevaBills(MgramsevaStream): """object for consumer bills""" - @property - def name(self) -> str: - return f"Bill_{self.consumer_code.replace('/', '_')}" - - def get_json_schema(self) -> Mapping[str, Any]: - """override""" - return { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": {"id": {"type": "string"}, "data": {"type": "object"}}, - } - - def __init__(self, headers: dict, request_info: dict, user_request: dict, consumer_code: str, 
From 925c8ff070d3297b577e25bb80aecc72242ab429 Mon Sep 17 00:00:00 2001
From: Rohit Chatterjee
Date: Tue, 6 Aug 2024 13:21:43 +0530
Subject: [PATCH 35/38] put the bills into a single table

---
 .../source_mgramseva/source.py | 54 +++++++++----------
 1 file changed, 26 insertions(+), 28 deletions(-)

diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py
index 83c4ef1df0f0..fdcda5da9655 100644
--- a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py
+++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py
@@ -15,6 +15,7 @@
 from airbyte_cdk.sources import AbstractSource
 from airbyte_cdk.sources.streams import Stream
 from airbyte_cdk.sources.streams.http import HttpStream
+from airbyte_cdk.sources.streams.core import StreamData
 
 
 # Basic full refresh stream
@@ -98,14 +99,6 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp
         """
         return map(lambda x: {"data": x, "id": x["id"]}, response.json()[self.response_key])
 
-    # def base_schema(self) -> dict:
-    #     """Base schema for all streams"""
-    #     return {
-    #         "$schema": "http://json-schema.org/draft-07/schema#",
-    #         "type": "object",
-    #         "properties": {"id": {"type": "string"}, "data": {"type": "object"}},
-    #     }
-
 
 class MgramsevaDemands(MgramsevaStream):
     """object for consumer demands"""
@@ -124,27 +117,32 @@ def __init__(self, headers: dict, request_info: dict, user_request: dict, start_
 class MgramsevaBills(MgramsevaStream):
     """object for consumer bills"""
 
-    @property
-    def name(self) -> str:
-        return f"Bill_{self.consumer_code.replace('/', '_')}"
-
-    def get_json_schema(self) -> Mapping[str, Any]:
-        """override"""
-        return {
-            "$schema": "http://json-schema.org/draft-07/schema#",
-            "type": "object",
-            "properties": {"id": {"type": "string"}, "data": {"type": "object"}},
-        }
-
-    def __init__(self, headers: dict, request_info: dict, user_request: dict, consumer_code: str, **kwargs):
+    def __init__(self, headers: dict, request_info: dict, user_request: dict, consumer_codes: list, **kwargs):
         """specify endpoint for bills and call super"""
-        self.consumer_code = consumer_code
-        params = {
+        self.headers = headers
+        self.request_info = request_info
+        self.user_request = user_request
+        self.consumer_codes = consumer_codes
+        self.params = {
             "tenantId": "br.testing",
-            "consumerCode": consumer_code,
             "businessService": "WS",
         }
-        super().__init__("billing-service/bill/v2/_fetchbill", headers, request_info, user_request, params, "Bill", **kwargs)
+
+    def read_records(
+        self,
+        sync_mode: SyncMode,
+        cursor_field: Optional[List[str]] = None,
+        stream_slice: Optional[Mapping[str, Any]] = None,
+        stream_state: Optional[Mapping[str, Any]] = None,
+    ) -> Iterable[StreamData]:
+        """override"""
+        for consumer_code in self.consumer_codes:
+            params = self.params.copy()
+            params["consumerCode"] = consumer_code
+            consumer_code_stream = MgramsevaStream(
+                "billing-service/bill/v2/_fetchbill", self.headers, self.request_info, self.user_request, params, "Bill"
+            )
+            yield from consumer_code_stream.read_records(sync_mode, cursor_field, stream_slice, stream_state)
 
 
 class MgramsevaTenantExpenses(MgramsevaStream):
     """object for tenant payments"""
@@ -276,8 +274,8 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]:
         # and now we need bills for each consumer
         consumer_codes = set()
         for demand in demand_stream.read_records(SyncMode.full_refresh):
-            if demand["data"]["consumerCode"] not in consumer_codes:
-                consumer_codes.add(demand["data"]["consumerCode"])
-                streams.append(MgramsevaBills(self.headers, self.request_info, self.user_request, demand["data"]["consumerCode"]))
+            consumer_codes.add(demand["data"]["consumerCode"])
+
+        streams.append(MgramsevaBills(self.headers, self.request_info, self.user_request, list(consumer_codes)))
 
         return streams
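With the `name` property override removed, the Airbyte CDK falls back to deriving the stream name from the class name, so all bills now sync into one `mgramseva_bills` table instead of one `Bill_<consumer code>` stream (and destination table) per consumer. Roughly what the CDK's default naming does, as a sketch:

```python
import re


def camel_to_snake(name: str) -> str:
    """Approximation of the CDK's default stream-name derivation."""
    return re.sub(r"(?<!^)(?=[A-Z])", "_", name).lower()


print(camel_to_snake("MgramsevaBills"))  # -> "mgramseva_bills"
```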
From 4735d44ad2baf1c853723f02608825f72e76d5d1 Mon Sep 17 00:00:00 2001
From: Rohit Chatterjee
Date: Wed, 7 Aug 2024 15:35:10 +0530
Subject: [PATCH 36/38] tenant expenses: from and to dates are now in config

tenant ids are now in config
---
 .../source_mgramseva/source.py | 70 ++++++++++++-------
 .../source_mgramseva/spec.yaml | 22 +++++-
 2 files changed, 66 insertions(+), 26 deletions(-)

diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py
index fdcda5da9655..d0f8e78a0a3d 100644
--- a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py
+++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py
@@ -103,10 +103,12 @@ class MgramsevaDemands(MgramsevaStream):
     """object for consumer demands"""
 
-    def __init__(self, headers: dict, request_info: dict, user_request: dict, start_date: datetime, end_date: datetime, **kwargs):
+    def __init__(
+        self, headers: dict, request_info: dict, user_request: dict, tenantid: str, start_date: datetime, end_date: datetime, **kwargs
+    ):
         """specify endpoint for demands and call super"""
         params = {
-            "tenantId": "br.testing",
+            "tenantId": tenantid,
             "businessService": "WS",
             "periodFrom": int(1000 * start_date.timestamp()),
             "periodTo": int(1000 * end_date.timestamp()),
@@ -117,14 +119,14 @@ class MgramsevaBills(MgramsevaStream):
     """object for consumer bills"""
 
-    def __init__(self, headers: dict, request_info: dict, user_request: dict, consumer_codes: list, **kwargs):
+    def __init__(self, headers: dict, request_info: dict, user_request: dict, tenantid: str, consumer_codes: list, **kwargs):
         """specify endpoint for bills and call super"""
         self.headers = headers
         self.request_info = request_info
         self.user_request = user_request
         self.consumer_codes = consumer_codes
         self.params = {
-            "tenantId": "br.testing",
+            "tenantId": tenantid,
             "businessService": "WS",
         }
@@ -148,13 +150,16 @@ class MgramsevaTenantExpenses(MgramsevaStream):
     """object for tenant payments"""
 
-    def __init__(self, headers: dict, request_info: dict, user_request: dict, **kwargs):
+    def __init__(
+        self, headers: dict, request_info: dict, user_request: dict, tenantid: str, fromdate: int, todate: int, **kwargs
+    ):
         """
         specify endpoint for demands and call super
         1672531200000 = 2023-01-01 00:00
         1830297600000 = 2028-01-01 00:00
         """
-        params = {"tenantId": "br.testing", "fromDate": 1672531200000, "toDate": 1830297600000}
+        self.tenantid = tenantid
+        self.fromdate = fromdate
+        self.todate = todate
+        params = {"tenantId": self.tenantid, "fromDate": self.fromdate, "toDate": self.todate}
         super().__init__(
             "echallan-services/eChallan/v1/_expenseDashboard", headers, request_info, user_request, params, "ExpenseDashboard", **kwargs
         )
@@ -163,15 +168,19 @@ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapp
         """
         :this response has only one object, so return it
         """
-        return [{"data": response.json()[self.response_key], "id": "1"}]
+        expenses = response.json()[self.response_key]
+        expenses["tenantId"] = self.tenantid
+        expenses["fromDate"] = self.fromdate
+        expenses["toDate"] = self.todate
+        return [{"data": expenses, "id": "1"}]
 
 
 class MgramsevaPayments(MgramsevaStream):
     """object for consumer payments"""
 
-    def __init__(self, headers: dict, request_info: dict, user_request: dict, **kwargs):
+    def __init__(self, headers: dict, request_info: dict, user_request: dict, tenantid: str, **kwargs):
         """specify endpoint for payments and call super"""
-        params = {"tenantId": "br.testing", "businessService": "WS"}
+        params = {"tenantId": tenantid, "businessService": "WS"}
         super().__init__("collection-services/payments/WS/_search", headers, request_info, user_request, params, "Payments", **kwargs)
@@ -259,23 +268,34 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]:
         self.setup(config)
         self.get_auth_token()
 
-        # Generate streams for each object type
-        streams = [
-            MgramsevaPayments(self.headers, self.request_info, self.user_request),
-            MgramsevaTenantExpenses(self.headers, self.request_info, self.user_request),
-        ]
+        tenant_expenses_from = datetime.strptime(config.get("tenant_expenses_from", "2022-01-01"), "%Y-%m-%d")
+        tenant_expenses_to = datetime.strptime(config.get("tenant_expenses_to", "2022-01-01"), "%Y-%m-%d")
 
         start_date = datetime.strptime(config.get("start_date", "2022-01-01"), "%Y-%m-%d").replace(tzinfo=pytz.UTC)
         end_date = datetime.today().replace(tzinfo=pytz.UTC)
 
-        demand_stream = MgramsevaDemands(self.headers, self.request_info, self.user_request, start_date, end_date)
-        streams.append(demand_stream)
-
-        # and now we need bills for each consumer
-        consumer_codes = set()
-        for demand in demand_stream.read_records(SyncMode.full_refresh):
-            consumer_codes.add(demand["data"]["consumerCode"])
-
-        streams.append(MgramsevaBills(self.headers, self.request_info, self.user_request, list(consumer_codes)))
-
-        return streams
+        for tenantid in self.config["tenantids"]:
+            # Generate streams for each object type
+            streams = [
+                MgramsevaPayments(self.headers, self.request_info, self.user_request, tenantid),
+                MgramsevaTenantExpenses(
+                    self.headers,
+                    self.request_info,
+                    self.user_request,
+                    tenantid,
+                    int(tenant_expenses_from.timestamp() * 1000),
+                    int(tenant_expenses_to.timestamp() * 1000),
+                ),
+            ]
+
+            demand_stream = MgramsevaDemands(self.headers, self.request_info, self.user_request, tenantid, start_date, end_date)
+            streams.append(demand_stream)
+
+            # and now we need bills for each consumer
+            consumer_codes = set()
+            for demand in demand_stream.read_records(SyncMode.full_refresh):
+                consumer_codes.add(demand["data"]["consumerCode"])
+
+            streams.append(MgramsevaBills(self.headers, self.request_info, self.user_request, tenantid, list(consumer_codes)))
+
+        return streams
diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/spec.yaml b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/spec.yaml
index 02e89bcf27a9..ce3b65adfe2b 100644
--- a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/spec.yaml
+++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/spec.yaml
@@ -8,6 +8,7 @@ connectionSpecification:
     - base_url
     - username
     - password
+    - tenantids
   properties:
     client_user:
       type: string
@@ -38,4 +39,23 @@ connectionSpecification:
       pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$
       default: "2022-01-01"
       description: Date from which to look for consumer demands
-      order: 5
\ No newline at end of file
+      order: 5
+    tenantids:
+      type: array
+      title: Tenant Ids
+      description: Tenant Ids for which data needs to be extracted
+      order: 6
+    tenant_expenses_from:
+      type: string
+      title: Tenant Expenses From Date
+      pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$
+      default: "2022-01-01"
+      description: Date from which to look for tenant expenses
+      order: 7
+    tenant_expenses_to:
+      type: string
+      title: Tenant Expenses To Date
+      pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$
+      default: "2024-01-01"
+      description: Date till which to look for tenant expenses
+      order: 8
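One caveat in this revision: `streams` is rebuilt inside the per-tenant loop, so only a single tenant's streams survive to the `return`. A sketch of the presumably intended accumulation (`make_tenant_streams` is a hypothetical helper standing in for the loop body above):

```python
from typing import Callable, List


def build_streams(config: dict, make_tenant_streams: Callable[[str], List]) -> List:
    """Collect streams across all tenants instead of resetting the list each iteration."""
    streams: List = []
    for tenantid in config["tenantids"]:
        streams.extend(make_tenant_streams(tenantid))
    return streams
```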
From 82f198e093a8dcab49b1227c836c85895f21f2ae Mon Sep 17 00:00:00 2001
From: Rohit Chatterjee
Date: Wed, 14 Aug 2024 14:44:34 +0530
Subject: [PATCH 37/38] read tenant expenses month-by-month

---
 .../source_mgramseva/source.py | 102 +++++++++++++-----
 1 file changed, 78 insertions(+), 24 deletions(-)

diff --git a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py
index d0f8e78a0a3d..6b2f4ebbfdd8 100644
--- a/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py
+++ b/airbyte-integrations/connectors/source-mgramseva/source_mgramseva/source.py
@@ -7,8 +7,10 @@
 from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple
 
 import base64
+import hashlib
 from datetime import datetime
 from logging import Logger
+from dateutil.relativedelta import relativedelta
 import requests
 import pytz
 from airbyte_cdk.models import SyncMode
@@ -147,32 +149,91 @@ def read_records(
         yield from consumer_code_stream.read_records(sync_mode, cursor_field, stream_slice, stream_state)
 
 
+class MgramsevaTenantExpense(MgramsevaStream):
+    """object for a single tenant expense"""
+
+    def __init__(
+        self,
+        endpoint: str,
+        headers: dict,
+        request_info: dict,
+        user_request: dict,
+        tenantid: str,
+        month_start: datetime,
+        next_month_start: datetime,
+        response_key: str,
+        **kwargs,
+    ):
+        """call super"""
+        self.tenantid = tenantid
+        self.month_start = month_start
+        self.next_month_start = next_month_start
+        params = {
+            "tenantId": self.tenantid,
+            "fromDate": int(month_start.timestamp() * 1000),
+            "toDate": int(next_month_start.timestamp() * 1000),
+        }
+        super().__init__(endpoint, headers, request_info, user_request, params, response_key, **kwargs)
+
+    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
+        """
+        :this response has only one object, so return it
+        """
+        expenses = response.json()[self.response_key]
+        expenses["tenantId"] = self.tenantid
+        expenses["fromDate"] = self.month_start.strftime("%Y-%m-%d")
+        expenses["toDate"] = self.next_month_start.strftime("%Y-%m-%d")
+        combined_string = f"{self.tenantid}{expenses['fromDate']}{expenses['toDate']}"
+        id_hash = hashlib.sha256(combined_string.encode())
+        return [{"data": expenses, "id": id_hash.hexdigest()}]
+
+
 class MgramsevaTenantExpenses(MgramsevaStream):
     """object for tenant payments"""
 
-    def __init__(self, headers: dict, request_info: dict, user_request: dict, tenantid: str, fromdate: int, todate: int, **kwargs):
+    def __init__(
+        self, headers: dict, request_info: dict, user_request: dict, tenantid: str, fromdate: datetime, todate: datetime, **kwargs
+    ):
         """
         specify endpoint for demands and call super
         1672531200000 = 2023-01-01 00:00
         1830297600000 = 2028-01-01 00:00
         """
+        self.headers = headers
+        self.request_info = request_info
+        self.user_request = user_request
         self.tenantid = tenantid
         self.fromdate = fromdate
         self.todate = todate
-        params = {"tenantId": self.tenantid, "fromDate": self.fromdate, "toDate": self.todate}
-        super().__init__(
-            "echallan-services/eChallan/v1/_expenseDashboard", headers, request_info, user_request, params, "ExpenseDashboard", **kwargs
-        )
 
-    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
-        """
-        :this response has only one object, so return it
-        """
-        expenses = response.json()[self.response_key]
-        expenses["tenantId"] = self.tenantid
-        expenses["fromDate"] = self.fromdate
-        expenses["toDate"] = self.todate
-        return [{"data": expenses, "id": "1"}]
+    def read_records(
+        self,
+        sync_mode: SyncMode,
+        cursor_field: Optional[List[str]] = None,
+        stream_slice: Optional[Mapping[str, Any]] = None,
+        stream_state: Optional[Mapping[str, Any]] = None,
+    ) -> Iterable[StreamData]:
+        """override"""
+
+        month_start = self.fromdate.replace(day=1)
+
+        while month_start < self.todate:
+
+            next_month_start = month_start + relativedelta(months=1)
+
+            stream = MgramsevaTenantExpense(
+                "echallan-services/eChallan/v1/_expenseDashboard",
+                self.headers,
+                self.request_info,
+                self.user_request,
+                self.tenantid,
+                month_start,
+                next_month_start,
+                "ExpenseDashboard",
+            )
+            yield from stream.read_records(sync_mode, cursor_field, stream_slice, stream_state)
+
+            month_start = next_month_start
@@ -268,8 +329,8 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]:
         self.setup(config)
         self.get_auth_token()
 
-        tenant_expenses_from = datetime.strptime(config.get("tenant_expenses_from", "2022-01-01"), "%Y-%m-%d")
-        tenant_expenses_to = datetime.strptime(config.get("tenant_expenses_to", "2022-01-01"), "%Y-%m-%d")
+        # tenant_expenses_from = datetime.strptime(config.get("tenant_expenses_from", "2022-01-01"), "%Y-%m-%d")
+        # tenant_expenses_to = datetime.strptime(config.get("tenant_expenses_to", "2022-01-01"), "%Y-%m-%d")
 
         start_date = datetime.strptime(config.get("start_date", "2022-01-01"), "%Y-%m-%d").replace(tzinfo=pytz.UTC)
         end_date = datetime.today().replace(tzinfo=pytz.UTC)
@@ -278,14 +339,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]:
             # Generate streams for each object type
             streams = [
                 MgramsevaPayments(self.headers, self.request_info, self.user_request, tenantid),
-                MgramsevaTenantExpenses(
-                    self.headers,
-                    self.request_info,
-                    self.user_request,
-                    tenantid,
-                    int(tenant_expenses_from.timestamp() * 1000),
-                    int(tenant_expenses_to.timestamp() * 1000),
-                ),
+                MgramsevaTenantExpenses(self.headers, self.request_info, self.user_request, tenantid, start_date, end_date),
             ]
 
             demand_stream = MgramsevaDemands(self.headers, self.request_info, self.user_request, tenantid, start_date, end_date)
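The month-by-month slicing and the hashed record id introduced in this patch can be sanity-checked in isolation. A self-contained sketch of the same logic (pure Python plus `dateutil`, no CDK types):

```python
import hashlib
from datetime import datetime

from dateutil.relativedelta import relativedelta


def month_slices(fromdate: datetime, todate: datetime):
    """Yield (month_start, next_month_start) pairs covering [fromdate, todate)."""
    month_start = fromdate.replace(day=1)
    while month_start < todate:
        next_month_start = month_start + relativedelta(months=1)
        yield month_start, next_month_start
        month_start = next_month_start


def expense_id(tenantid: str, month_start: datetime, next_month_start: datetime) -> str:
    """Deterministic id, mirroring MgramsevaTenantExpense.parse_response."""
    combined = f"{tenantid}{month_start:%Y-%m-%d}{next_month_start:%Y-%m-%d}"
    return hashlib.sha256(combined.encode()).hexdigest()


slices = list(month_slices(datetime(2024, 1, 15), datetime(2024, 4, 1)))
assert len(slices) == 3  # Jan, Feb, Mar; the first slice snaps back to day 1
# a given slice always hashes to the same id, so re-syncs overwrite rather than duplicate
assert expense_id("br.testing", *slices[0]) == expense_id("br.testing", *slices[0])
```

This also explains why the stream no longer uses the fixed id `"1"`: with one record per tenant per month, the id has to encode the slice.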
From cfcfd1aa188e42c0585e5130775b162828d7e14e Mon Sep 17 00:00:00 2001
From: Rohit Chatterjee
Date: Wed, 14 Aug 2024 15:04:17 +0530
Subject: [PATCH 38/38] update the command to run airbyte-ci

---
 airbyte-integrations/connectors/source-mgramseva/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/airbyte-integrations/connectors/source-mgramseva/README.md b/airbyte-integrations/connectors/source-mgramseva/README.md
index dde87da0d594..bc97140ddd65 100644
--- a/airbyte-integrations/connectors/source-mgramseva/README.md
+++ b/airbyte-integrations/connectors/source-mgramseva/README.md
@@ -50,7 +50,7 @@ poetry run pytest tests
 
 1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md)
 2. Run the following command to build the docker image:
 ```bash
-airbyte-ci connectors --name=source-mgramseva build
+PATH=$PATH:~/.local/bin/ airbyte-ci --disable-update-check --disable-auto-update connectors --name=source-mgramseva build
 ```
 
 An image will be available on your host with the tag `airbyte/source-mgramseva:dev`.
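Taken together, patches 34-37 change what the connector actually reads from its config: `client_password` became optional, `tenantids` became required, and the `tenant_expenses_from`/`tenant_expenses_to` dates were superseded by `start_date` plus today's date. A config sketch matching that final state (field names follow spec.yaml; every value here is a placeholder):

```python
# Illustrative config for the final revision; all values are placeholders.
sample_config = {
    "base_url": "https://mgramseva.example.org",  # hypothetical endpoint
    "username": "report-user",
    "password": "********",
    "client_user": "client",
    "client_password": None,  # optional since PATCH 34; normalized to ""
    "start_date": "2022-01-01",
    "tenantids": ["br.testing"],
}
```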