diff --git a/.coveragerc b/.coveragerc
index 6c2162e..4f99b31 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,4 +1,4 @@
[run]
relative_files = True
omit =
- sekoia_automation/scripts/new_module/template/*
\ No newline at end of file
+ sekoia_automation/scripts/new_module/template/*
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 0bf27b9..c4d574b 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -51,12 +51,12 @@ jobs:
- name: Install dependencies
id: install-dependencies
- run: poetry install
+ run: poetry install --extras=all
- name: Execute Python tests
id: execute-tests
run: |
- poetry run python -m pytest --junit-xml=junit.xml --cov-report term --cov-report xml:coverage.xml --cov=sekoia_automation tests
+ poetry run python -m pytest --junit-xml=junit.xml --cov-report term --cov-report xml:coverage.xml --cov=sekoia_automation
- name: Upload Test Results
if: always()
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2e5c387..c585ad0 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,22 +1,25 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- - repo: https://github.com/charliermarsh/ruff-pre-commit
- # Ruff version.
- rev: 'v0.0.285'
+ - repo: local
hooks:
- - id: ruff
- args: [ --fix, --exit-non-zero-on-fix ]
+ - id: black
+ name: Format with Black
+ entry: poetry run black
+ language: system
+ types: [python]
- - repo: https://github.com/ambv/black
- rev: 23.3.0
- hooks:
- - id: black
- language_version: python3
+ - id: ruff
+ name: Format with Ruff
+ entry: poetry run ruff
+ language: system
+ types: [ python ]
+ args: [ --fix, --exit-non-zero-on-fix, . ]
- - repo: https://github.com/pre-commit/mirrors-mypy
- rev: 'v1.3.0' # Use the sha / tag you want to point at
- hooks:
- id: mypy
- args: [--install-types, --non-interactive]
- exclude: sekoia_automation/scripts/new_module/template/
+ name: Validate types with MyPy
+ entry: poetry run mypy
+ language: system
+ types: [ python ]
+ pass_filenames: false
+ args: [ . ]
diff --git a/CHANGELOG.md b/CHANGELOG.md
index fc013bc..6145ffd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
+### Added
+
+- Add new wrappers to work with aio libraries such as aiohttp, aiobotocore, etc.
+- New AsyncConnector that contains async implementation of push events
+
## [1.4.1] - 2023-09-13
### Added
diff --git a/poetry.lock b/poetry.lock
index 84eeb05..78482dd 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,9 +1,242 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry and should not be changed by hand.
+
+[[package]]
+name = "aiobotocore"
+version = "2.6.0"
+description = "Async client for aws services using botocore and aiohttp"
+category = "main"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "aiobotocore-2.6.0-py3-none-any.whl", hash = "sha256:0186e6a843364748cdbbf76ee98e9337c44f71a4e694ad1b110d5c516fbce909"},
+ {file = "aiobotocore-2.6.0.tar.gz", hash = "sha256:4805d0140bdfa17bfc2d0ba1243c8cc4273e927201fca5cf2e497c0004a9fab7"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.7.4.post0,<4.0.0"
+aioitertools = ">=0.5.1,<1.0.0"
+botocore = ">=1.31.17,<1.31.18"
+wrapt = ">=1.10.10,<2.0.0"
+
+[package.extras]
+awscli = ["awscli (>=1.29.17,<1.29.18)"]
+boto3 = ["boto3 (>=1.28.17,<1.28.18)"]
+
+[[package]]
+name = "aiocsv"
+version = "1.2.4"
+description = "Asynchronous CSV reading/writing"
+category = "main"
+optional = true
+python-versions = ">=3.6, <4"
+files = [
+ {file = "aiocsv-1.2.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6143d3c1f9535b3bb64ab5a3426ca49419ef193b987dbbb15737d5972cc7b196"},
+ {file = "aiocsv-1.2.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bbb0e2cbc3cf8c3f24da8fb0a9b3c697d98db1d63fffaa68b442fae0346a8ee"},
+ {file = "aiocsv-1.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c2e8705d2717b81a6cb86c767941f2511d4c1751b4f1a58f3b24a705b720c66"},
+ {file = "aiocsv-1.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:4479fda6cd395008a22b3c04e8e960c110455324c2c2fc27157e9569698bc224"},
+ {file = "aiocsv-1.2.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:59ddcacdcf4d57e667dc75b3a0354ab0b1f5eabe9088a7e5fccab96dd400ef61"},
+ {file = "aiocsv-1.2.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e32053cb29d28ce7c1b539bb040aafd5bee3934d409045f87de072baa4488e4"},
+ {file = "aiocsv-1.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ee0b79ed8b8a3f11ec28ca61cb9074f40f4180b25f47a63074e81b93d0fe751a"},
+ {file = "aiocsv-1.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:0184c46515957788bec90e4661b3e741209ba9e7c8c0db2b26019426d0ff6221"},
+ {file = "aiocsv-1.2.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3a8956d0a60de25d4701e680ff9783eeb89721fec9ad26f3c764686e2b6d074e"},
+ {file = "aiocsv-1.2.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d865c4d3abf21c590c25c531f367aa61f1c20f520ec05eec1d8e5ee1c8393316"},
+ {file = "aiocsv-1.2.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e717f15cca53b1288d5b6689cff4b798c848b40ba99a5def10d4694223530daf"},
+ {file = "aiocsv-1.2.4-cp36-cp36m-win_amd64.whl", hash = "sha256:b47dfc3ea2baee21baefa875188c6b65ae64fd84831fc796a049f88e91245f9a"},
+ {file = "aiocsv-1.2.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:239a5cd94c0936774b087c266dfb6688b93bc50839652f5b9dba94bd039cac57"},
+ {file = "aiocsv-1.2.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f490c9a9862888e934beb91492ebac8024fe43871a19eb17c64a952128525eb3"},
+ {file = "aiocsv-1.2.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c6f9170a0235e770d6cd7acacd084ba6b7fc06a2ee9a5a8f18b445d5c2081618"},
+ {file = "aiocsv-1.2.4-cp37-cp37m-win_amd64.whl", hash = "sha256:c5ae550d35e65c965ffe90d7bebe802a3ab75ab7b7b1aac73dd3a26b5e46fa83"},
+ {file = "aiocsv-1.2.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:264e35c09c0dd735b41e43c2cccc05d19fc685b0ef828c5da56e2800e0ffefd9"},
+ {file = "aiocsv-1.2.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82dfe16bd19dcfbbafeffb68a0e67577192c49ffe2e2d678f476fc761fcb7ded"},
+ {file = "aiocsv-1.2.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3892ddcae649fa6dfa7dc2ced97131c6f2444b60fee97dd944cf9d43d1b51aa2"},
+ {file = "aiocsv-1.2.4-cp38-cp38-win_amd64.whl", hash = "sha256:4246180c5c9980f25c07627bf7f928ec7d58e0f119c9904add69b8f264763fcd"},
+ {file = "aiocsv-1.2.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5554f0ad0725b44682b714f3cd039b092af1ca9219e20fc029b8c3db7c928ce6"},
+ {file = "aiocsv-1.2.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02bd8f1ec4676894194043461c55d371b906ab9f16dd460606dccda74850de37"},
+ {file = "aiocsv-1.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e088937657a99bcd346a22bc0948b29d4ffa1d60976dc1a37d4df8104a08f2ab"},
+ {file = "aiocsv-1.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:d33222cef014d396400b26011c3456cc1abaf1b2ca344c2945969fbd48c5f9e8"},
+ {file = "aiocsv-1.2.4.tar.gz", hash = "sha256:f592cb62ef52471ca42a762e89d0a02fcde395067041a7d70972aefa722c0fcd"},
+]
+
+[[package]]
+name = "aiofiles"
+version = "23.2.1"
+description = "File support for asyncio."
+category = "main"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "aiofiles-23.2.1-py3-none-any.whl", hash = "sha256:19297512c647d4b27a2cf7c34caa7e405c0d60b5560618a29a9fe027b18b0107"},
+ {file = "aiofiles-23.2.1.tar.gz", hash = "sha256:84ec2218d8419404abcb9f0c02df3f34c6e0a68ed41072acfb1cef5cbc29051a"},
+]
+
+[[package]]
+name = "aiohttp"
+version = "3.8.5"
+description = "Async http client/server framework (asyncio)"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"},
+ {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"},
+ {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"},
+ {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"},
+ {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"},
+ {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"},
+ {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"},
+ {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"},
+ {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"},
+ {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"},
+ {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"},
+ {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"},
+ {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"},
+ {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"},
+ {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"},
+ {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"},
+ {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"},
+ {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"},
+ {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"},
+ {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"},
+ {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"},
+ {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"},
+ {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"},
+ {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"},
+ {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"},
+ {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"},
+ {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"},
+ {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"},
+ {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"},
+ {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"},
+ {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"},
+]
+
+[package.dependencies]
+aiosignal = ">=1.1.2"
+async-timeout = ">=4.0.0a3,<5.0"
+attrs = ">=17.3.0"
+charset-normalizer = ">=2.0,<4.0"
+frozenlist = ">=1.1.1"
+multidict = ">=4.5,<7.0"
+yarl = ">=1.0,<2.0"
+
+[package.extras]
+speedups = ["Brotli", "aiodns", "cchardet"]
+
+[[package]]
+name = "aioitertools"
+version = "0.11.0"
+description = "itertools and builtins for AsyncIO and mixed iterables"
+category = "main"
+optional = true
+python-versions = ">=3.6"
+files = [
+ {file = "aioitertools-0.11.0-py3-none-any.whl", hash = "sha256:04b95e3dab25b449def24d7df809411c10e62aab0cbe31a50ca4e68748c43394"},
+ {file = "aioitertools-0.11.0.tar.gz", hash = "sha256:42c68b8dd3a69c2bf7f2233bf7df4bb58b557bca5252ac02ed5187bbc67d6831"},
+]
+
+[[package]]
+name = "aiolimiter"
+version = "1.1.0"
+description = "asyncio rate limiter, a leaky bucket implementation"
+category = "main"
+optional = true
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "aiolimiter-1.1.0-py3-none-any.whl", hash = "sha256:0b4997961fc58b8df40279e739f9cf0d3e255e63e9a44f64df567a8c17241e24"},
+ {file = "aiolimiter-1.1.0.tar.gz", hash = "sha256:461cf02f82a29347340d031626c92853645c099cb5ff85577b831a7bd21132b5"},
+]
+
+[[package]]
+name = "aioresponses"
+version = "0.7.4"
+description = "Mock out requests made by ClientSession from aiohttp package"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+ {file = "aioresponses-0.7.4-py2.py3-none-any.whl", hash = "sha256:1160486b5ea96fcae6170cf2bdef029b9d3a283b7dbeabb3d7f1182769bfb6b7"},
+ {file = "aioresponses-0.7.4.tar.gz", hash = "sha256:9b8c108b36354c04633bad0ea752b55d956a7602fe3e3234b939fc44af96f1d8"},
+]
+
+[package.dependencies]
+aiohttp = ">=2.0.0,<4.0.0"
+
+[[package]]
+name = "aiosignal"
+version = "1.3.1"
+description = "aiosignal: a list of registered asynchronous callbacks"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
+ {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
+]
+
+[package.dependencies]
+frozenlist = ">=1.1.0"
[[package]]
name = "arrow"
version = "1.2.3"
description = "Better dates & times for Python"
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
@@ -18,6 +251,7 @@ python-dateutil = ">=2.7.0"
name = "astroid"
version = "2.15.6"
description = "An abstract syntax tree for Python with inference support."
+category = "dev"
optional = false
python-versions = ">=3.7.2"
files = [
@@ -33,10 +267,42 @@ wrapt = [
{version = ">=1.14,<2", markers = "python_version >= \"3.11\""},
]
+[[package]]
+name = "async-timeout"
+version = "4.0.3"
+description = "Timeout context manager for asyncio programs"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
+ {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
+]
+
+[[package]]
+name = "attrs"
+version = "23.1.0"
+description = "Classes Without Boilerplate"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
+ {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
+]
+
+[package.extras]
+cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
+dev = ["attrs[docs,tests]", "pre-commit"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
+tests = ["attrs[tests-no-zope]", "zope-interface"]
+tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+
[[package]]
name = "binaryornot"
version = "0.4.4"
description = "Ultra-lightweight pure Python package to check if a file is binary or text."
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -51,6 +317,7 @@ chardet = ">=3.0.2"
name = "black"
version = "23.9.1"
description = "The uncompromising code formatter."
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -95,17 +362,18 @@ uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "boto3"
-version = "1.28.46"
+version = "1.28.17"
description = "The AWS SDK for Python"
+category = "main"
optional = false
python-versions = ">= 3.7"
files = [
- {file = "boto3-1.28.46-py3-none-any.whl", hash = "sha256:04445d70127c25fad69e2cab7e3f5cb219c8d6e60463af3657f20e29ac517957"},
- {file = "boto3-1.28.46.tar.gz", hash = "sha256:2ca2852f7b7c1bc2e56f10f968d4c8483c8228b935ecd89a444ae8292ad0dc24"},
+ {file = "boto3-1.28.17-py3-none-any.whl", hash = "sha256:bca0526f819e0f19c0f1e6eba3e2d1d6b6a92a45129f98c0d716e5aab6d9444b"},
+ {file = "boto3-1.28.17.tar.gz", hash = "sha256:90f7cfb5e1821af95b1fc084bc50e6c47fa3edc99f32de1a2591faa0c546bea7"},
]
[package.dependencies]
-botocore = ">=1.31.46,<1.32.0"
+botocore = ">=1.31.17,<1.32.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.6.0,<0.7.0"
@@ -114,13 +382,14 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
-version = "1.31.46"
+version = "1.31.17"
description = "Low-level, data-driven core of boto 3."
+category = "main"
optional = false
python-versions = ">= 3.7"
files = [
- {file = "botocore-1.31.46-py3-none-any.whl", hash = "sha256:ac0c1258b1782cde42950bd00138fdce6bd7d04855296af8c326d5844a426473"},
- {file = "botocore-1.31.46.tar.gz", hash = "sha256:6c30be3371624a80d6a881d9c7771a80e0eb82697ee374aaf522cd59b76e14dd"},
+ {file = "botocore-1.31.17-py3-none-any.whl", hash = "sha256:6ac34a1d34aa3750e78b77b8596617e2bab938964694d651939dba2cbde2c12b"},
+ {file = "botocore-1.31.17.tar.gz", hash = "sha256:396459065dba4339eb4da4ec8b4e6599728eb89b7caaceea199e26f7d824a41c"},
]
[package.dependencies]
@@ -135,6 +404,7 @@ crt = ["awscrt (==0.16.26)"]
name = "certifi"
version = "2023.7.22"
description = "Python package for providing Mozilla's CA Bundle."
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
@@ -142,10 +412,23 @@ files = [
{file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
]
+[[package]]
+name = "cfgv"
+version = "3.4.0"
+description = "Validate configuration and produce human readable error messages."
+category = "dev"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
+ {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
+]
+
[[package]]
name = "chardet"
version = "5.2.0"
description = "Universal encoding detector for Python 3"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -157,6 +440,7 @@ files = [
name = "charset-normalizer"
version = "3.2.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+category = "main"
optional = false
python-versions = ">=3.7.0"
files = [
@@ -241,6 +525,7 @@ files = [
name = "click"
version = "8.1.7"
description = "Composable command line interface toolkit"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -255,6 +540,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""}
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
+category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [
@@ -266,6 +552,7 @@ files = [
name = "commonmark"
version = "0.9.1"
description = "Python parser for the CommonMark Markdown spec"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -280,6 +567,7 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"]
name = "cookiecutter"
version = "2.3.0"
description = "A command-line utility that creates projects from project templates, e.g. creating a Python package project from a Python package project template."
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -301,6 +589,7 @@ rich = "*"
name = "coverage"
version = "7.3.1"
description = "Code coverage measurement for Python"
+category = "dev"
optional = false
python-versions = ">=3.8"
files = [
@@ -368,6 +657,7 @@ toml = ["tomli"]
name = "dill"
version = "0.3.7"
description = "serialize all of Python"
+category = "dev"
optional = false
python-versions = ">=3.7"
files = [
@@ -378,10 +668,23 @@ files = [
[package.extras]
graph = ["objgraph (>=1.7.2)"]
+[[package]]
+name = "distlib"
+version = "0.3.7"
+description = "Distribution utilities"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+ {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"},
+ {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"},
+]
+
[[package]]
name = "exceptiongroup"
version = "1.1.3"
description = "Backport of PEP 654 (exception groups)"
+category = "dev"
optional = false
python-versions = ">=3.7"
files = [
@@ -392,10 +695,144 @@ files = [
[package.extras]
test = ["pytest (>=6)"]
+[[package]]
+name = "execnet"
+version = "2.0.2"
+description = "execnet: rapid multi-Python deployment"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"},
+ {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"},
+]
+
+[package.extras]
+testing = ["hatch", "pre-commit", "pytest", "tox"]
+
+[[package]]
+name = "faker"
+version = "19.6.1"
+description = "Faker is a Python package that generates fake data for you."
+category = "dev"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "Faker-19.6.1-py3-none-any.whl", hash = "sha256:64c8513c53c3a809075ee527b323a0ba61517814123f3137e4912f5d43350139"},
+ {file = "Faker-19.6.1.tar.gz", hash = "sha256:5d6b7880b3bea708075ddf91938424453f07053a59f8fa0453c1870df6ff3292"},
+]
+
+[package.dependencies]
+python-dateutil = ">=2.4"
+
+[[package]]
+name = "filelock"
+version = "3.12.4"
+description = "A platform independent file lock."
+category = "dev"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"},
+ {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"]
+typing = ["typing-extensions (>=4.7.1)"]
+
+[[package]]
+name = "frozenlist"
+version = "1.4.0"
+description = "A list-like structure which implements collections.abc.MutableSequence"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"},
+ {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"},
+ {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"},
+ {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"},
+ {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"},
+ {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"},
+ {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"},
+ {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"},
+ {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"},
+ {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"},
+ {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"},
+ {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"},
+ {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"},
+ {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"},
+ {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"},
+ {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"},
+ {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"},
+ {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"},
+ {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"},
+ {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"},
+ {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"},
+ {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"},
+ {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"},
+ {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"},
+ {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"},
+ {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"},
+ {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"},
+ {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"},
+ {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"},
+]
+
+[[package]]
+name = "identify"
+version = "2.5.29"
+description = "File identification library for Python"
+category = "dev"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "identify-2.5.29-py2.py3-none-any.whl", hash = "sha256:24437fbf6f4d3fe6efd0eb9d67e24dd9106db99af5ceb27996a5f7895f24bf1b"},
+ {file = "identify-2.5.29.tar.gz", hash = "sha256:d43d52b86b15918c137e3a74fff5224f60385cd0e9c38e99d07c257f02f151a5"},
+]
+
+[package.extras]
+license = ["ukkonen"]
+
[[package]]
name = "idna"
version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
+category = "main"
optional = false
python-versions = ">=3.5"
files = [
@@ -407,6 +844,7 @@ files = [
name = "iniconfig"
version = "2.0.0"
description = "brain-dead simple config-ini parsing"
+category = "dev"
optional = false
python-versions = ">=3.7"
files = [
@@ -418,6 +856,7 @@ files = [
name = "isort"
version = "5.12.0"
description = "A Python utility / library to sort Python imports."
+category = "dev"
optional = false
python-versions = ">=3.8.0"
files = [
@@ -435,6 +874,7 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"]
name = "jinja2"
version = "3.1.2"
description = "A very fast and expressive template engine."
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -452,6 +892,7 @@ i18n = ["Babel (>=2.7)"]
name = "jmespath"
version = "1.0.1"
description = "JSON Matching Expressions"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -463,6 +904,7 @@ files = [
name = "lazy-object-proxy"
version = "1.9.0"
description = "A fast and thorough lazy object proxy."
+category = "dev"
optional = false
python-versions = ">=3.7"
files = [
@@ -504,10 +946,30 @@ files = [
{file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"},
]
+[[package]]
+name = "loguru"
+version = "0.7.2"
+description = "Python logging made (stupidly) simple"
+category = "main"
+optional = true
+python-versions = ">=3.5"
+files = [
+ {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"},
+ {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"},
+]
+
+[package.dependencies]
+colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""}
+win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""}
+
+[package.extras]
+dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"]
+
[[package]]
name = "lxml"
version = "4.9.3"
description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
+category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*"
files = [
@@ -615,6 +1077,7 @@ source = ["Cython (>=0.29.35)"]
name = "markupsafe"
version = "2.1.3"
description = "Safely add untrusted strings to HTML/XML markup."
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -674,6 +1137,7 @@ files = [
name = "mccabe"
version = "0.7.0"
description = "McCabe checker, plugin for flake8"
+category = "dev"
optional = false
python-versions = ">=3.6"
files = [
@@ -681,10 +1145,95 @@ files = [
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
]
+[[package]]
+name = "multidict"
+version = "6.0.4"
+description = "multidict implementation"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"},
+ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"},
+ {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"},
+ {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"},
+ {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"},
+ {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"},
+ {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"},
+ {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"},
+ {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"},
+ {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"},
+ {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"},
+ {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"},
+ {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"},
+ {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"},
+ {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"},
+]
+
[[package]]
name = "mypy"
version = "1.5.1"
description = "Optional static typing for Python"
+category = "dev"
optional = false
python-versions = ">=3.8"
files = [
@@ -731,6 +1280,7 @@ reports = ["lxml"]
name = "mypy-extensions"
version = "1.0.0"
description = "Type system extensions for programs checked with the mypy type checker."
+category = "main"
optional = false
python-versions = ">=3.5"
files = [
@@ -738,10 +1288,26 @@ files = [
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
+[[package]]
+name = "nodeenv"
+version = "1.8.0"
+description = "Node.js virtual environment builder"
+category = "dev"
+optional = false
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
+files = [
+ {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"},
+ {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"},
+]
+
+[package.dependencies]
+setuptools = "*"
+
[[package]]
name = "orjson"
version = "3.9.7"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -811,6 +1377,7 @@ files = [
name = "packaging"
version = "23.1"
description = "Core utilities for Python packages"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -822,6 +1389,7 @@ files = [
name = "pathspec"
version = "0.11.2"
description = "Utility library for gitignore style pattern matching of file paths."
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -833,6 +1401,7 @@ files = [
name = "platformdirs"
version = "3.10.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -848,6 +1417,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co
name = "pluggy"
version = "1.3.0"
description = "plugin and hook calling mechanisms for python"
+category = "dev"
optional = false
python-versions = ">=3.8"
files = [
@@ -859,10 +1429,30 @@ files = [
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
+[[package]]
+name = "pre-commit"
+version = "3.4.0"
+description = "A framework for managing and maintaining multi-language pre-commit hooks."
+category = "dev"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pre_commit-3.4.0-py2.py3-none-any.whl", hash = "sha256:96d529a951f8b677f730a7212442027e8ba53f9b04d217c4c67dc56c393ad945"},
+ {file = "pre_commit-3.4.0.tar.gz", hash = "sha256:6bbd5129a64cad4c0dfaeeb12cd8f7ea7e15b77028d985341478c8af3c759522"},
+]
+
+[package.dependencies]
+cfgv = ">=2.0.0"
+identify = ">=1.0.0"
+nodeenv = ">=0.11.1"
+pyyaml = ">=5.1"
+virtualenv = ">=20.10.0"
+
[[package]]
name = "prometheus-client"
version = "0.16.0"
description = "Python client for the Prometheus monitoring system."
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
@@ -877,6 +1467,7 @@ twisted = ["twisted"]
name = "pydantic"
version = "1.10.12"
description = "Data validation and settings management using python type hints"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -929,6 +1520,7 @@ email = ["email-validator (>=1.0.3)"]
name = "pygments"
version = "2.16.1"
description = "Pygments is a syntax highlighting package written in Python."
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -943,6 +1535,7 @@ plugins = ["importlib-metadata"]
name = "pylint"
version = "2.17.5"
description = "python code static checker"
+category = "dev"
optional = false
python-versions = ">=3.7.2"
files = [
@@ -971,6 +1564,7 @@ testutils = ["gitpython (>3)"]
name = "pytest"
version = "7.4.2"
description = "pytest: simple powerful testing with Python"
+category = "dev"
optional = false
python-versions = ">=3.7"
files = [
@@ -989,10 +1583,30 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+[[package]]
+name = "pytest-asyncio"
+version = "0.21.1"
+description = "Pytest support for asyncio"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"},
+ {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"},
+]
+
+[package.dependencies]
+pytest = ">=7.0.0"
+
+[package.extras]
+docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
+testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"]
+
[[package]]
name = "pytest-cov"
version = "4.1.0"
description = "Pytest plugin for measuring coverage."
+category = "dev"
optional = false
python-versions = ">=3.7"
files = [
@@ -1007,10 +1621,50 @@ pytest = ">=4.6"
[package.extras]
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
+[[package]]
+name = "pytest-env"
+version = "1.0.1"
+description = "py.test plugin that allows you to add environment variables."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest_env-1.0.1-py3-none-any.whl", hash = "sha256:e8faf927c6fcdbbc8fe3317506acc116713c9708d01652a0fd945f9ae27b71aa"},
+ {file = "pytest_env-1.0.1.tar.gz", hash = "sha256:603fe216e8e03a5d134989cb41317c59aabef013d2250c71b864ab0798fbe6f6"},
+]
+
+[package.dependencies]
+pytest = ">=7.3.1"
+
+[package.extras]
+test = ["coverage (>=7.2.7)", "pytest-mock (>=3.10)"]
+
+[[package]]
+name = "pytest-xdist"
+version = "3.3.1"
+description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"},
+ {file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"},
+]
+
+[package.dependencies]
+execnet = ">=1.1"
+pytest = ">=6.2.0"
+
+[package.extras]
+psutil = ["psutil (>=3.0)"]
+setproctitle = ["setproctitle"]
+testing = ["filelock"]
+
[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
+category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
@@ -1025,6 +1679,7 @@ six = ">=1.5"
name = "python-slugify"
version = "5.0.2"
description = "A Python Slugify application that handles Unicode"
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
@@ -1042,6 +1697,7 @@ unidecode = ["Unidecode (>=1.1.1)"]
name = "pyyaml"
version = "6.0.1"
description = "YAML parser and emitter for Python"
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
@@ -1101,6 +1757,7 @@ files = [
name = "requests"
version = "2.31.0"
description = "Python HTTP for Humans."
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1122,6 +1779,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
name = "requests-mock"
version = "1.11.0"
description = "Mock out responses from the requests package"
+category = "dev"
optional = false
python-versions = "*"
files = [
@@ -1141,6 +1799,7 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes
name = "rich"
version = "12.6.0"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+category = "main"
optional = false
python-versions = ">=3.6.3,<4.0.0"
files = [
@@ -1157,34 +1816,36 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]
[[package]]
name = "ruff"
-version = "0.0.289"
+version = "0.0.290"
description = "An extremely fast Python linter, written in Rust."
+category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "ruff-0.0.289-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:c9a89d748e90c840bac9c37afe90cf13a5bfd460ca02ea93dad9d7bee3af03b4"},
- {file = "ruff-0.0.289-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:7f7396c6ea01ba332a6ad9d47642bac25d16bd2076aaa595b001f58b2f32ff05"},
- {file = "ruff-0.0.289-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7180de86c8ecd39624dec1699136f941c07e723201b4ce979bec9e7c67b40ad2"},
- {file = "ruff-0.0.289-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73f37c65508203dd01a539926375a10243769c20d4fcab3fa6359cd3fbfc54b7"},
- {file = "ruff-0.0.289-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c14abcd7563b5c80be2dd809eeab20e4aa716bf849860b60a22d87ddf19eb88"},
- {file = "ruff-0.0.289-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:91b6d63b6b46d4707916472c91baa87aa0592e73f62a80ff55efdf6c0668cfd6"},
- {file = "ruff-0.0.289-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6479b8c4be3c36046c6c92054762b276fa0fddb03f6b9a310fbbf4c4951267fd"},
- {file = "ruff-0.0.289-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5424318c254bcb091cb67e140ec9b9f7122074e100b06236f252923fb41e767"},
- {file = "ruff-0.0.289-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4daa90865796aedcedf0d8897fdd4cd09bf0ddd3504529a4ccf211edcaff3c7d"},
- {file = "ruff-0.0.289-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8057e8ab0016c13b9419bad119e854f881e687bd96bc5e2d52c8baac0f278a44"},
- {file = "ruff-0.0.289-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7eebfab2e6a6991908ff1bf82f2dc1e5095fc7e316848e62124526837b445f4d"},
- {file = "ruff-0.0.289-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ebc7af550018001a7fb39ca22cdce20e1a0de4388ea4a007eb5c822f6188c297"},
- {file = "ruff-0.0.289-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6e4e6eccb753efe760ba354fc8e9f783f6bba71aa9f592756f5bd0d78db898ed"},
- {file = "ruff-0.0.289-py3-none-win32.whl", hash = "sha256:bbb3044f931c09cf17dbe5b339896eece0d6ac10c9a86e172540fcdb1974f2b7"},
- {file = "ruff-0.0.289-py3-none-win_amd64.whl", hash = "sha256:6d043c5456b792be2615a52f16056c3cf6c40506ce1f2d6f9d3083cfcb9eeab6"},
- {file = "ruff-0.0.289-py3-none-win_arm64.whl", hash = "sha256:04a720bcca5e987426bb14ad8b9c6f55e259ea774da1cbeafe71569744cfd20a"},
- {file = "ruff-0.0.289.tar.gz", hash = "sha256:2513f853b0fc42f0339b7ab0d2751b63ce7a50a0032d2689b54b2931b3b866d7"},
+ {file = "ruff-0.0.290-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:0e2b09ac4213b11a3520221083866a5816616f3ae9da123037b8ab275066fbac"},
+ {file = "ruff-0.0.290-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:4ca6285aa77b3d966be32c9a3cd531655b3d4a0171e1f9bf26d66d0372186767"},
+ {file = "ruff-0.0.290-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35e3550d1d9f2157b0fcc77670f7bb59154f223bff281766e61bdd1dd854e0c5"},
+ {file = "ruff-0.0.290-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d748c8bd97874f5751aed73e8dde379ce32d16338123d07c18b25c9a2796574a"},
+ {file = "ruff-0.0.290-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:982af5ec67cecd099e2ef5e238650407fb40d56304910102d054c109f390bf3c"},
+ {file = "ruff-0.0.290-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bbd37352cea4ee007c48a44c9bc45a21f7ba70a57edfe46842e346651e2b995a"},
+ {file = "ruff-0.0.290-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d9be6351b7889462912e0b8185a260c0219c35dfd920fb490c7f256f1d8313e"},
+ {file = "ruff-0.0.290-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75cdc7fe32dcf33b7cec306707552dda54632ac29402775b9e212a3c16aad5e6"},
+ {file = "ruff-0.0.290-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb07f37f7aecdbbc91d759c0c09870ce0fb3eed4025eebedf9c4b98c69abd527"},
+ {file = "ruff-0.0.290-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2ab41bc0ba359d3f715fc7b705bdeef19c0461351306b70a4e247f836b9350ed"},
+ {file = "ruff-0.0.290-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:150bf8050214cea5b990945b66433bf9a5e0cef395c9bc0f50569e7de7540c86"},
+ {file = "ruff-0.0.290-py3-none-musllinux_1_2_i686.whl", hash = "sha256:75386ebc15fe5467248c039f5bf6a0cfe7bfc619ffbb8cd62406cd8811815fca"},
+ {file = "ruff-0.0.290-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ac93eadf07bc4ab4c48d8bb4e427bf0f58f3a9c578862eb85d99d704669f5da0"},
+ {file = "ruff-0.0.290-py3-none-win32.whl", hash = "sha256:461fbd1fb9ca806d4e3d5c745a30e185f7cf3ca77293cdc17abb2f2a990ad3f7"},
+ {file = "ruff-0.0.290-py3-none-win_amd64.whl", hash = "sha256:f1f49f5ec967fd5778813780b12a5650ab0ebcb9ddcca28d642c689b36920796"},
+ {file = "ruff-0.0.290-py3-none-win_arm64.whl", hash = "sha256:ae5a92dfbdf1f0c689433c223f8dac0782c2b2584bd502dfdbc76475669f1ba1"},
+ {file = "ruff-0.0.290.tar.gz", hash = "sha256:949fecbc5467bb11b8db810a7fa53c7e02633856ee6bd1302b2f43adcd71b88d"},
]
[[package]]
name = "s3path"
version = "0.5.0"
description = ""
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -1200,6 +1861,7 @@ smart-open = "*"
name = "s3transfer"
version = "0.6.2"
description = "An Amazon S3 Transfer Manager"
+category = "main"
optional = false
python-versions = ">= 3.7"
files = [
@@ -1215,13 +1877,14 @@ crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
[[package]]
name = "sentry-sdk"
-version = "1.30.0"
+version = "1.31.0"
description = "Python client for Sentry (https://sentry.io)"
+category = "main"
optional = false
python-versions = "*"
files = [
- {file = "sentry-sdk-1.30.0.tar.gz", hash = "sha256:7dc873b87e1faf4d00614afd1058bfa1522942f33daef8a59f90de8ed75cd10c"},
- {file = "sentry_sdk-1.30.0-py2.py3-none-any.whl", hash = "sha256:2e53ad63f96bb9da6570ba2e755c267e529edcf58580a2c0d2a11ef26e1e678b"},
+ {file = "sentry-sdk-1.31.0.tar.gz", hash = "sha256:6de2e88304873484207fed836388e422aeff000609b104c802749fd89d56ba5b"},
+ {file = "sentry_sdk-1.31.0-py2.py3-none-any.whl", hash = "sha256:64a7141005fb775b9db298a30de93e3b83e0ddd1232dc6f36eb38aebc1553291"},
]
[package.dependencies]
@@ -1231,10 +1894,12 @@ urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""}
[package.extras]
aiohttp = ["aiohttp (>=3.5)"]
arq = ["arq (>=0.23)"]
+asyncpg = ["asyncpg (>=0.23)"]
beam = ["apache-beam (>=2.12)"]
bottle = ["bottle (>=0.12.13)"]
celery = ["celery (>=3)"]
chalice = ["chalice (>=1.16.0)"]
+clickhouse-driver = ["clickhouse-driver (>=0.2.0)"]
django = ["django (>=1.8)"]
falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"]
@@ -1256,10 +1921,28 @@ starlette = ["starlette (>=0.19.1)"]
starlite = ["starlite (>=1.48)"]
tornado = ["tornado (>=5)"]
+[[package]]
+name = "setuptools"
+version = "68.2.2"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+category = "dev"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"},
+ {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+
[[package]]
name = "shellingham"
version = "1.5.3"
description = "Tool to Detect Surrounding Shell"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1271,6 +1954,7 @@ files = [
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
+category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
@@ -1282,6 +1966,7 @@ files = [
name = "smart-open"
version = "6.4.0"
description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)"
+category = "main"
optional = false
python-versions = ">=3.6,<4.0"
files = [
@@ -1303,6 +1988,7 @@ webhdfs = ["requests"]
name = "tenacity"
version = "8.2.3"
description = "Retry code until it succeeds"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1317,6 +2003,7 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"]
name = "text-unidecode"
version = "1.3"
description = "The most basic Text::Unidecode port"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -1328,6 +2015,7 @@ files = [
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1339,6 +2027,7 @@ files = [
name = "tomlkit"
version = "0.12.1"
description = "Style preserving TOML library"
+category = "dev"
optional = false
python-versions = ">=3.7"
files = [
@@ -1350,6 +2039,7 @@ files = [
name = "typer"
version = "0.7.0"
description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
@@ -1369,21 +2059,86 @@ dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2
doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
+[[package]]
+name = "types-aiofiles"
+version = "23.2.0.0"
+description = "Typing stubs for aiofiles"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+ {file = "types-aiofiles-23.2.0.0.tar.gz", hash = "sha256:b6a7127bd232e0802532837b84140b1cd5df19ee60bea3a5699720d2b583361b"},
+ {file = "types_aiofiles-23.2.0.0-py3-none-any.whl", hash = "sha256:5d6719e8148cb2a9c4ea46dad86d50d3b675c46a940adca698533a8d2216d53d"},
+]
+
+[[package]]
+name = "types-python-slugify"
+version = "8.0.0.3"
+description = "Typing stubs for python-slugify"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+ {file = "types-python-slugify-8.0.0.3.tar.gz", hash = "sha256:868e6610ab9a01c01b2ccc1b982363e694d6bbb4fcf32e0d82688c89dceb4e2c"},
+ {file = "types_python_slugify-8.0.0.3-py3-none-any.whl", hash = "sha256:2353c161c79ab6cce955b50720c6cd03586ec297558122236d130e4a19f21209"},
+]
+
+[[package]]
+name = "types-pyyaml"
+version = "6.0.12.11"
+description = "Typing stubs for PyYAML"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+ {file = "types-PyYAML-6.0.12.11.tar.gz", hash = "sha256:7d340b19ca28cddfdba438ee638cd4084bde213e501a3978738543e27094775b"},
+ {file = "types_PyYAML-6.0.12.11-py3-none-any.whl", hash = "sha256:a461508f3096d1d5810ec5ab95d7eeecb651f3a15b71959999988942063bf01d"},
+]
+
+[[package]]
+name = "types-requests"
+version = "2.31.0.2"
+description = "Typing stubs for requests"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+ {file = "types-requests-2.31.0.2.tar.gz", hash = "sha256:6aa3f7faf0ea52d728bb18c0a0d1522d9bfd8c72d26ff6f61bfc3d06a411cf40"},
+ {file = "types_requests-2.31.0.2-py3-none-any.whl", hash = "sha256:56d181c85b5925cbc59f4489a57e72a8b2166f18273fd8ba7b6fe0c0b986f12a"},
+]
+
+[package.dependencies]
+types-urllib3 = "*"
+
+[[package]]
+name = "types-urllib3"
+version = "1.26.25.14"
+description = "Typing stubs for urllib3"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+ {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"},
+ {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"},
+]
+
[[package]]
name = "typing-extensions"
-version = "4.7.1"
-description = "Backported and Experimental Type Hints for Python 3.7+"
+version = "4.8.0"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"},
- {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"},
+ {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"},
+ {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
]
[[package]]
name = "unittest-xml-reporting"
version = "3.2.0"
description = "unittest-based test runner with Ant/JUnit like XML reporting."
+category = "dev"
optional = false
python-versions = ">=3.7"
files = [
@@ -1398,6 +2153,7 @@ lxml = "*"
name = "urllib3"
version = "1.26.16"
description = "HTTP library with thread-safe connection pooling, file post, and more."
+category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
@@ -1410,10 +2166,47 @@ brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
+[[package]]
+name = "virtualenv"
+version = "20.24.5"
+description = "Virtual Python Environment builder"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"},
+ {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"},
+]
+
+[package.dependencies]
+distlib = ">=0.3.7,<1"
+filelock = ">=3.12.2,<4"
+platformdirs = ">=3.9.1,<4"
+
+[package.extras]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
+test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
+
+[[package]]
+name = "win32-setctime"
+version = "1.1.0"
+description = "A small Python utility to set file creation time on Windows"
+category = "main"
+optional = true
+python-versions = ">=3.5"
+files = [
+ {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
+ {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
+]
+
+[package.extras]
+dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
+
[[package]]
name = "wrapt"
version = "1.15.0"
description = "Module for decorators, wrappers and monkey patching."
+category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
files = [
@@ -1494,7 +2287,102 @@ files = [
{file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"},
]
+[[package]]
+name = "yarl"
+version = "1.9.2"
+description = "Yet another URL library"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"},
+ {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"},
+ {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"},
+ {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"},
+ {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"},
+ {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"},
+ {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"},
+ {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"},
+ {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"},
+ {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"},
+ {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"},
+ {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"},
+ {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"},
+ {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"},
+ {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"},
+]
+
+[package.dependencies]
+idna = ">=2.0"
+multidict = ">=4.0"
+
+[extras]
+all = ["aiohttp", "aiolimiter", "aiobotocore", "aiofiles", "aiocsv", "loguru"]
+async-aws = ["aiobotocore"]
+async-files = ["aiofiles", "aiocsv"]
+async-http = ["aiohttp", "aiolimiter", "aiofiles"]
+logging = ["loguru"]
+
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.12"
-content-hash = "49cdb80a9a37e60fc7a1f31c5477d560d6a7ba5bb417ae6fb1d997d6a7602bbd"
+content-hash = "20f16b3ca9769f250fa28eca0c60975ee310dbd1ec23de1e0fba737f2ad65a01"
diff --git a/pyproject.toml b/pyproject.toml
index 2e95f18..e4ae7e4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -44,13 +44,55 @@ PyYAML = "^6.0"
Jinja2 = "^3.0.3"
black = "*" # To format files in cli tools
prometheus-client = "^0.16.0"
+aiohttp = { version = "^3.8.4", optional = true }
+aiolimiter = { version = "^1.1.0", optional = true }
+aiobotocore = { version = "^2.5.2", optional = true }
+aiofiles = { version = "^23.1.0", optional = true }
+aiocsv = { version = "^1.2.4", optional = true }
+loguru = { version = "^0.7.0", optional = true }
[tool.poetry.group.dev.dependencies]
unittest-xml-reporting = "^3"
pylint = "*"
pytest = "*"
pytest-cov = "*"
+pytest-asyncio = "*"
+pytest-xdist = "*"
+pytest-env = "*"
requests-mock = "^1.9"
+faker = "^19.0.0"
+aioresponses = "^0.7.4"
+types-aiofiles = "^23.1.0.4"
+types-requests = "^2.31.0.1"
+types-pyyaml = "^6.0.12.10"
+types-python-slugify = "^8.0.0.2"
+pre-commit = "^3.3.3"
+
+[tool.poetry.extras]
+all = [
+ "aiohttp",
+ "aiolimiter",
+ "aiobotocore",
+ "aiofiles",
+ "aiocsv",
+ "loguru",
+]
+async-aws = [
+ "aiobotocore",
+]
+async-http = [
+ "aiohttp",
+ "aiolimiter",
+ "aiofiles",
+]
+async-files = [
+ "aiofiles",
+ "aiocsv"
+]
+logging = [
+ "loguru"
+]
+
[tool.poetry.group.lint.dependencies]
ruff = "*"
@@ -63,7 +105,15 @@ force-exclude = "tests/expectations/sample_module/main.py|sekoia_automation/scri
[tool.pytest.ini_options]
minversion = "6.0"
-addopts = "-ra"
+addopts = '''
+ --asyncio-mode=auto
+ --cache-clear
+ --cov=sekoia_automation
+ --cov-report=html
+ --cov-report=term-missing:skip-covered
+ --cov-fail-under=90
+ -ra
+'''
testpaths = [
"tests",
]
@@ -72,6 +122,7 @@ testpaths = [
select = ["A", "ARG", "E", "F", "I", "N", "RUF", "UP", "W"]
exclude = [
"tests/expectations/sample_module/main.py",
+ "tests/aio/",
"sekoia_automation/scripts/new_module/template/"
]
@@ -82,5 +133,8 @@ exclude = [
python_version = "3.11"
ignore_missing_imports = true
show_column_numbers = true
-exclude = "sekoia_automation/scripts/new_module/template/"
+exclude = [
+ "sekoia_automation/scripts/new_module/template/",
+ "tests/"
+]
disable_error_code = "annotation-unchecked"
diff --git a/sekoia_automation/aio/__init__.py b/sekoia_automation/aio/__init__.py
new file mode 100644
index 0000000..758bb58
--- /dev/null
+++ b/sekoia_automation/aio/__init__.py
@@ -0,0 +1 @@
+"""Package contains all utilities and wrappers for asynchronous mode."""
diff --git a/sekoia_automation/aio/connector.py b/sekoia_automation/aio/connector.py
new file mode 100644
index 0000000..9f9586f
--- /dev/null
+++ b/sekoia_automation/aio/connector.py
@@ -0,0 +1,146 @@
+"""Contains connector with async version."""
+from abc import ABC
+from asyncio import AbstractEventLoop, get_event_loop
+from collections.abc import AsyncGenerator
+from contextlib import asynccontextmanager
+from datetime import datetime
+from urllib.parse import urljoin
+
+from aiohttp import ClientSession
+from aiolimiter import AsyncLimiter
+
+from sekoia_automation.connector import Connector, DefaultConnectorConfiguration
+
+
+class AsyncConnector(Connector, ABC):
+ """Async version of Connector."""
+
+ configuration: DefaultConnectorConfiguration
+
+ _event_loop: AbstractEventLoop
+
+ _session: ClientSession | None = None
+ _rate_limiter: AsyncLimiter | None = None
+
+ def __init__(self, event_loop: AbstractEventLoop | None = None, *args, **kwargs):
+ """
+ Initialize AsyncConnector.
+
+ Optionally accepts event_loop to use, otherwise will use default event loop.
+
+ Args:
+ event_loop: AbstractEventLoop | None
+ """
+ super().__init__(*args, **kwargs)
+
+ self._event_loop = event_loop or get_event_loop()
+
+ @classmethod
+ def set_client_session(cls, session: ClientSession) -> None:
+ """
+ Set client session.
+
+ Args:
+ session: ClientSession
+ """
+ cls._session = session
+
+ @classmethod
+ def set_rate_limiter(cls, rate_limiter: AsyncLimiter) -> None:
+ """
+ Set rate limiter.
+
+ Args:
+ rate_limiter:
+ """
+ cls._rate_limiter = rate_limiter
+
+ @classmethod
+ def get_rate_limiter(cls) -> AsyncLimiter:
+ """
+ Get or initialize rate limiter.
+
+ Returns:
+ AsyncLimiter:
+ """
+ if cls._rate_limiter is None:
+ cls._rate_limiter = AsyncLimiter(1, 1)
+
+ return cls._rate_limiter
+
+ @classmethod
+ @asynccontextmanager
+ async def session(cls) -> AsyncGenerator[ClientSession, None]: # pragma: no cover
+ """
+ Get or initialize client session if it is not initialized yet.
+
+ Returns:
+ ClientSession:
+ """
+ if cls._session is None:
+ cls._session = ClientSession()
+
+ async with cls.get_rate_limiter():
+ yield cls._session
+
+ async def push_data_to_intakes(
+ self, events: list[str]
+ ) -> list[str]: # pragma: no cover
+ """
+ Custom method to push events to intakes.
+
+ Args:
+ events: list[str]
+
+ Returns:
+ list[str]:
+ """
+ self._last_events_time = datetime.utcnow()
+ batch_api = urljoin(self.configuration.intake_server, "/batch")
+
+ self.log(f"Push {len(events)} events to intakes")
+
+ result_ids = []
+
+ chunks = self._chunk_events(events, self.configuration.chunk_size)
+
+ async with self.session() as session:
+ for chunk_index, chunk in enumerate(chunks):
+ self.log(
+ "Start to push chunk {} with data count {} to intakes".format(
+ chunk_index,
+ len(chunk),
+ )
+ )
+
+ request_body = {
+ "intake_key": self.configuration.intake_key,
+ "jsons": chunk,
+ }
+
+ for attempt in self._retry():
+ with attempt:
+ async with session.post(
+ batch_api,
+ headers={"User-Agent": self._connector_user_agent},
+ json=request_body,
+ ) as response:
+ if response.status >= 300:
+ error = await response.text()
+ error_message = f"Chunk {chunk_index} error: {error}"
+ exception = RuntimeError(error_message)
+
+ self.log(message=error_message, level="error")
+ self.log_exception(exception)
+
+ raise exception
+
+ result = await response.json()
+
+ self.log(
+ f"Successfully pushed chunk {chunk_index} to intakes"
+ )
+
+ result_ids.extend(result.get("event_ids", []))
+
+ return result_ids
diff --git a/sekoia_automation/aio/helpers/__init__.py b/sekoia_automation/aio/helpers/__init__.py
new file mode 100644
index 0000000..1a7ea2d
--- /dev/null
+++ b/sekoia_automation/aio/helpers/__init__.py
@@ -0,0 +1,5 @@
+"""
+Package contains all utilities and useful helpers.
+
+NOTE!!!: each package inside requires additional libraries to be installed.
+"""
diff --git a/sekoia_automation/aio/helpers/aws/__init__.py b/sekoia_automation/aio/helpers/aws/__init__.py
new file mode 100644
index 0000000..14eba59
--- /dev/null
+++ b/sekoia_automation/aio/helpers/aws/__init__.py
@@ -0,0 +1,7 @@
+"""
+Utilities and wrappers to work with aws.
+
+To use this package you need to install additional libraries:
+
+* aiobotocore (https://github.com/aio-libs/aiobotocore)
+"""
diff --git a/sekoia_automation/aio/helpers/aws/client.py b/sekoia_automation/aio/helpers/aws/client.py
new file mode 100644
index 0000000..f4aa0cc
--- /dev/null
+++ b/sekoia_automation/aio/helpers/aws/client.py
@@ -0,0 +1,119 @@
+"""Aws base client wrapper with its config class."""
+
+from functools import cached_property
+from typing import Generic, TypeVar
+
+from aiobotocore.session import AioCredentials, AioSession, ClientCreatorContext
+from botocore.credentials import CredentialProvider
+from pydantic import BaseModel, Field
+
+
+class AwsConfiguration(BaseModel):
+ """AWS client base configuration."""
+
+ aws_access_key_id: str = Field(description="AWS access key id")
+ aws_secret_access_key: str = Field(description="AWS secret access key")
+ aws_region: str = Field(description="AWS region name")
+
+
+AwsConfigurationT = TypeVar("AwsConfigurationT", bound=AwsConfiguration)
+
+
+class _CredentialsProvider(CredentialProvider):
+ """Custom credentials provider."""
+
+ METHOD = "_sekoia_credentials_provider"
+
+ def __init__(self, access_key: str, secret_key: str) -> None:
+ """
+ Initialize CredentialsProvider.
+
+ Args:
+ access_key: str
+ secret_key: str
+ """
+ self.access_key = access_key
+ self.secret_key = secret_key
+
+ async def load(self) -> AioCredentials:
+ """
+ Load credentials.
+
+ Returns:
+ ReadOnlyCredentials
+ """
+ return AioCredentials(
+ access_key=self.access_key,
+ secret_key=self.secret_key,
+ method=self.METHOD,
+ )
+
+
+class AwsClient(Generic[AwsConfigurationT]):
+ """
+ Aws base client.
+
+ All other clients should extend this base client.
+ """
+
+ _configuration: AwsConfigurationT | None = None
+ _credentials_provider: _CredentialsProvider | None = None
+
+ def __init__(self, configuration: AwsConfigurationT | None = None) -> None:
+ """
+ Initialize AwsClient.
+
+ Args:
+ configuration: AwsConfigurationT
+ """
+ self._configuration = configuration
+
+ if self._configuration:
+ self._credentials_provider = _CredentialsProvider(
+ self._configuration.aws_access_key_id,
+ self._configuration.aws_secret_access_key,
+ )
+
+ @cached_property
+ def get_session(self) -> AioSession:
+ """
+ Get AWS session.
+
+ Returns:
+ AioSession:
+ """
+ session = AioSession()
+
+ # Make our own creds provider to be executed at 1 place
+ if self._credentials_provider:
+ credential_provider = session.get_component("credential_provider")
+ credential_provider.insert_before("env", self._credentials_provider)
+
+ return session
+
+ def get_client(
+ self,
+ client_name: str,
+ region_name: str | None = None,
+ ) -> ClientCreatorContext:
+ """
+ Get AWS client.
+
+ Args:
+ client_name: str
+ region_name: str | None
+
+ Returns:
+ ClientCreatorContext:
+ """
+ _region_name = region_name
+ if not region_name and self._configuration is not None:
+ _region_name = self._configuration.aws_region
+
+ if not _region_name:
+ raise ValueError("Region name is required. You should specify it.")
+
+ return self.get_session.create_client(
+ client_name,
+ region_name=_region_name,
+ )
diff --git a/sekoia_automation/aio/helpers/files/__init__.py b/sekoia_automation/aio/helpers/files/__init__.py
new file mode 100644
index 0000000..6d4b072
--- /dev/null
+++ b/sekoia_automation/aio/helpers/files/__init__.py
@@ -0,0 +1,8 @@
+"""
+Utilities to work with files and directories.
+
+To use this package you need to install additional libraries:
+
+* aiofiles (https://github.com/Tinche/aiofiles)
+* aiocsv (https://github.com/MKuranowski/aiocsv)
+"""
diff --git a/sekoia_automation/aio/helpers/files/csv.py b/sekoia_automation/aio/helpers/files/csv.py
new file mode 100644
index 0000000..54f748e
--- /dev/null
+++ b/sekoia_automation/aio/helpers/files/csv.py
@@ -0,0 +1,28 @@
+"""Useful functions for working with csv files."""
+
+from collections.abc import AsyncGenerator
+from typing import Any
+
+import aiocsv
+import aiofiles
+
+
+async def csv_file_as_rows(
+ file_path: str, encoding: str = "utf-8", delimiter: str = ","
+) -> AsyncGenerator[dict[str, Any], None]:
+ """
+ Read csv file as rows.
+
+ Transform each row into dict with keys from the header row.
+
+ Args:
+ file_path: str
+ encoding: str
+ delimiter: str
+
+ Yields:
+ dict[str, Any]:
+ """
+ async with aiofiles.open(file_path, encoding=encoding) as file:
+ async for row in aiocsv.AsyncDictReader(file, delimiter=delimiter):
+ yield row
diff --git a/sekoia_automation/aio/helpers/files/utils.py b/sekoia_automation/aio/helpers/files/utils.py
new file mode 100644
index 0000000..cdb280d
--- /dev/null
+++ b/sekoia_automation/aio/helpers/files/utils.py
@@ -0,0 +1,13 @@
+"""Wrappers to perform base operations with files."""
+
+from aiofiles import os as aio_os
+
+
+async def delete_file(file_name: str) -> None:
+ """
+ Delete file.
+
+ Args:
+ file_name: str
+ """
+ await aio_os.remove(file_name)
diff --git a/sekoia_automation/aio/helpers/http/__init__.py b/sekoia_automation/aio/helpers/http/__init__.py
new file mode 100644
index 0000000..aaa7d1c
--- /dev/null
+++ b/sekoia_automation/aio/helpers/http/__init__.py
@@ -0,0 +1,8 @@
+"""
+Utilities and wrappers to work with http services.
+
+To use this package you need to install additional libraries:
+
+* aiohttp (https://github.com/aio-libs/aiohttp)
+* aiofiles (https://github.com/Tinche/aiofiles)
+"""
diff --git a/sekoia_automation/aio/helpers/http/http_client.py b/sekoia_automation/aio/helpers/http/http_client.py
new file mode 100644
index 0000000..4bfb8e9
--- /dev/null
+++ b/sekoia_automation/aio/helpers/http/http_client.py
@@ -0,0 +1,91 @@
+"""HttpClient with ratelimiter."""
+
+from collections.abc import AsyncGenerator
+from contextlib import asynccontextmanager
+
+from aiohttp import ClientSession
+from aiolimiter import AsyncLimiter
+
+
+class HttpClient:
+ """
+ Http client with optional rate limiting.
+
+ Example:
+ >>> from sekoia_automation.aio.helpers.http.http_client import HttpClient
+ >>> class CustomHttpClient(HttpClient):
+ >>> def __init__(self):
+ >>> super().__init__()
+ >>>
+ >>> async def load_data(self, url: str) -> str:
+ >>> async with self.session() as session:
+ >>> async with session.get(url) as response:
+ >>> return await response.text()
+ >>>
+ >>> client = CustomHttpClient()
+ >>> # If rate limiter is set, it will be used
+ >>> client.set_rate_limit(max_rate=10, time_period=60)
+ >>> # or
+ >>> client.set_rate_limiter(AsyncLimiter(max_rate=10, time_period=60))
+ >>>
+ >>> result = await client.load_data("https://example.com")
+ """
+
+ _session: ClientSession | None = None
+ _rate_limiter: AsyncLimiter | None = None
+
+ def __init__(
+ self,
+ max_rate: float | None = None,
+ time_period: float | None = None,
+ rate_limiter: AsyncLimiter | None = None,
+ ):
+ """
+ Initialize HttpClient.
+
+ Args:
+ max_rate: float | None
+ time_period: float | None
+ rate_limiter: AsyncLimiter | None
+ """
+ if max_rate and time_period:
+ self.set_rate_limit(max_rate, time_period) # pragma: no cover
+
+ if rate_limiter:
+ self.set_rate_limiter(rate_limiter) # pragma: no cover
+
+ def set_rate_limit(self, max_rate: float, time_period: float = 60) -> None:
+ """
+ Set rate limiter.
+
+ Args:
+ max_rate: float
+ time_period: float
+ """
+ self._rate_limiter = AsyncLimiter(max_rate=max_rate, time_period=time_period)
+
+ def set_rate_limiter(self, rate_limiter: AsyncLimiter) -> None: # pragma: no cover
+ """
+ Set rate limiter.
+
+ Args:
+ rate_limiter:
+ """
+ self._rate_limiter = rate_limiter
+
+ @asynccontextmanager
+ async def session(self) -> AsyncGenerator[ClientSession, None]:
+ """
+ Get configured session with rate limiter.
+
+ Yields:
+ AsyncGenerator[ClientSession, None]:
+ """
+ if self._session is None:
+ self._session = ClientSession()
+
+ if self._rate_limiter:
+ async with self._rate_limiter:
+ yield self._session
+ else:
+ yield self._session
diff --git a/sekoia_automation/aio/helpers/http/token_refresher.py b/sekoia_automation/aio/helpers/http/token_refresher.py
new file mode 100644
index 0000000..ef3cb30
--- /dev/null
+++ b/sekoia_automation/aio/helpers/http/token_refresher.py
@@ -0,0 +1,150 @@
+"""Auth token refresher wrapper with token schema."""
+
+import asyncio
+import time
+from asyncio import Task
+from collections.abc import AsyncGenerator
+from contextlib import asynccontextmanager
+from typing import Generic, TypeVar
+
+from aiohttp import ClientSession
+from pydantic import BaseModel
+from pydantic.generics import GenericModel
+
+HttpTokenT = TypeVar("HttpTokenT", bound=BaseModel)
+
+
+class RefreshedToken(GenericModel, Generic[HttpTokenT]):
+ """Model to work with auth token with additional info."""
+
+ token: HttpTokenT
+ created_at: int
+ ttl: int
+
+ def is_valid(self) -> bool:
+ """
+ Check if token is not expired yet and valid.
+
+ Returns:
+ bool:
+ """
+ return not self.is_expired()
+
+ def is_expired(self) -> bool:
+ """
+ Check if token is expired.
+
+ Returns:
+ bool:
+ """
+ return self.created_at + self.ttl < (time.time() - 1)
+
+
+RefreshedTokenT = TypeVar("RefreshedTokenT", bound=RefreshedToken)
+
+
+class GenericTokenRefresher(Generic[RefreshedTokenT]):
+ """
+ Contains access token refresher logic.
+
+ Example of usage:
+ >>> # Define schema for token response from server
+ >>> class HttpToken(BaseModel):
+ >>> access_token: str
+ >>> signature: str
+ >>>
+ >>> # Define TokenRefresher class with necessary logic
+ >>> class CustomTokenRefresher(GenericTokenRefresher):
+ >>> def __init__(self, client_id: str, client_secret: str, auth_url: str):
+ >>> super().__init__()
+ >>> ...
+ >>> async def get_token(self) -> RefreshedToken[HttpToken]:
+ >>> ...
+ >>>
+ >>> token_refresher = CustomTokenRefresher(client_id, client_secret, auth_url)
+ >>>
+ >>> async with token_refresher.with_access_token() as access_token:
+ >>> print(access_token)
+ """
+
+ _session: ClientSession | None = None
+
+ def __init__(self):
+ """Initialize GenericTokenRefresher."""
+
+ self._token: RefreshedTokenT | None = None
+ self._token_refresh_task: Task[None] | None = None
+
+ def session(self) -> ClientSession:
+ """
+ Initialize client session.
+
+ Singleton client session to work with token refresh logic.
+
+ Returns:
+ ClientSession:
+ """
+ if not self._session:
+ self._session = ClientSession()
+
+ return self._session
+
+ async def get_token(self) -> RefreshedTokenT:
+ """
+ Get new token logic.
+
+ Main method to get new token.
+
+ Returns:
+ RefreshedTokenT: instance of RefreshedToken
+ """
+ raise NotImplementedError(
+ "You should implement `get_token` method in child class"
+ )
+
+ async def _refresh_token(self) -> None:
+ """
+ Refresh token logic.
+
+ Also triggers token refresh task.
+ """
+ self._token = await self.get_token()
+ await self._schedule_token_refresh(self._token.ttl)
+
+ async def _schedule_token_refresh(self, expires_in: int) -> None:
+ """
+ Schedule token refresh.
+
+ Args:
+ expires_in: int
+ """
+ await self.close()
+
+ async def _refresh() -> None:
+ await asyncio.sleep(expires_in)
+ await self._refresh_token()
+
+ self._token_refresh_task = asyncio.create_task(_refresh())
+
+ async def close(self) -> None:
+ """
+ Cancel token refresh task.
+ """
+ if self._token_refresh_task:
+ self._token_refresh_task.cancel()
+
+ @asynccontextmanager
+ async def with_access_token(self) -> AsyncGenerator[RefreshedTokenT, None]:
+ """
+ Get access token.
+
+ Yields:
+ RefreshedTokenT:
+ """
+ if self._token is None:
+ await self._refresh_token()
+
+ if not self._token:
+ raise ValueError("Token can not be initialized")
+
+ yield self._token
diff --git a/sekoia_automation/aio/helpers/http/utils.py b/sekoia_automation/aio/helpers/http/utils.py
new file mode 100644
index 0000000..0e32d7d
--- /dev/null
+++ b/sekoia_automation/aio/helpers/http/utils.py
@@ -0,0 +1,31 @@
+import aiofiles
+from aiohttp import ClientResponse
+
+
+async def save_aiohttp_response(
+ response: ClientResponse, chunk_size: int = 1024, temp_dir: str = "/tmp"
+) -> str:
+ """
+ Save aiohttp response to temp file.
+
+ Args:
+ response: ClientResponse
+ chunk_size: int
+ temp_dir: str
+
+ Returns:
+ str: path to temp file
+ """
+ async with aiofiles.tempfile.NamedTemporaryFile(
+ "wb",
+ delete=False,
+ dir=temp_dir,
+ ) as file:
+ while True:
+ chunk = await response.content.read(chunk_size)
+ if not chunk:
+ break
+
+ await file.write(chunk)
+
+ return str(file.name)
diff --git a/sekoia_automation/connector/__init__.py b/sekoia_automation/connector/__init__.py
index b8205e8..26cd84f 100644
--- a/sekoia_automation/connector/__init__.py
+++ b/sekoia_automation/connector/__init__.py
@@ -1,4 +1,5 @@
import uuid
+from abc import ABC
from collections.abc import Generator, Sequence
from concurrent.futures import ThreadPoolExecutor
from concurrent.futures import wait as wait_futures
@@ -27,7 +28,7 @@ class DefaultConnectorConfiguration(BaseModel):
chunk_size: int = 1000
-class Connector(Trigger):
+class Connector(Trigger, ABC):
configuration: DefaultConnectorConfiguration
seconds_without_events = 3600 * 6
@@ -52,7 +53,7 @@ def _retry(self):
)
@cached_property
- def __connector_user_agent(self):
+ def _connector_user_agent(self) -> str:
return f"sekoiaio-connector-{self.configuration.intake_key}"
def _send_chunk(
@@ -70,7 +71,7 @@ def _send_chunk(
res: Response = requests.post(
batch_api,
json=request_body,
- headers={"User-Agent": self.__connector_user_agent},
+ headers={"User-Agent": self._connector_user_agent},
timeout=30,
)
res.raise_for_status()
@@ -80,7 +81,19 @@ def _send_chunk(
self.log(message=message, level="error")
self.log_exception(ex, message=message)
- def push_events_to_intakes(self, events: list[str], sync: bool = False) -> list:
+ def push_events_to_intakes(
+ self, events: list[str], sync: bool = False
+ ) -> list[str]:
+ """
+ Push events to intakes.
+
+ Args:
+ events: list[str]
+ sync: bool
+
+ Returns:
+ list[str]
+ """
# no event to push
if not events:
return []
@@ -127,7 +140,18 @@ def send_records(
event_name: str,
to_file: bool = True,
records_var_name: str = "records",
- ):
+ ) -> None:
+ """
+ Sends records to the intake.
+
+ Optionally persists events to file.
+
+ Args:
+ records: list
+ event_name: str
+ to_file: bool
+ records_var_name: str
+ """
if not to_file:
self.send_event(event={records_var_name: records}, event_name=event_name)
return
@@ -153,12 +177,19 @@ def send_records(
)
def _chunk_events(
- self, events: Sequence, chunk_size: int
+ self,
+ events: Sequence,
+ chunk_size: int,
) -> Generator[list[Any], None, None]:
- """Group events by chunk.
+ """
+ Group events by chunk.
+
+ Args:
+ events: Sequence: The events to group
+ chunk_size: int: The size of the chunk
- :param sequence events: The events to group
- :param int chunk_size: The size of the chunk
+ Returns:
+ Generator[list[Any], None, None]:
"""
chunk: list[Any] = []
chunk_bytes: int = 0
@@ -196,14 +227,15 @@ def _chunk_events(
"were discarded (length > 64kb)"
)
- def forward_events(self, events):
+ def forward_events(self, events) -> None:
try:
chunks = self._chunk_events(events, self.configuration.chunk_size)
+ _name = self.name or "" # mypy complains about NoneType in annotation
for records in chunks:
self.log(message=f"Forwarding {len(records)} records", level="info")
self.send_records(
records=list(records),
- event_name=f"{self.name.lower().replace(' ', '-')}_{time()!s}",
+ event_name=f"{_name.lower().replace(' ', '-')}_{time()!s}",
)
except Exception as ex:
self.log_exception(ex, message="Failed to forward events")
diff --git a/sekoia_automation/loguru/__init__.py b/sekoia_automation/loguru/__init__.py
new file mode 100644
index 0000000..b526328
--- /dev/null
+++ b/sekoia_automation/loguru/__init__.py
@@ -0,0 +1,5 @@
+"""
+Configure LOGURU logger.
+
+https://github.com/Delgan/loguru
+"""
diff --git a/sekoia_automation/loguru/config.py b/sekoia_automation/loguru/config.py
new file mode 100644
index 0000000..9d907ce
--- /dev/null
+++ b/sekoia_automation/loguru/config.py
@@ -0,0 +1,94 @@
+"""Logging configuration and init function."""
+
+import logging
+import sys
+
+from loguru import logger
+from pydantic import BaseModel, validator
+
+from .formatters import format_record
+from .handlers import InterceptHandler
+
+
+class LoggingConfig(BaseModel):
+ """Contain all necessary logging config."""
+
+ log_lvl: str = "INFO"
+
+ log_file: str = "logs/{time:YYYY-MM-DD}.log"
+ log_rotation: str = "00:00"
+ log_retention: str = "1 month"
+ log_compression: str = "zip"
+ log_queue: bool = True
+
+ json_logs: bool = False
+
+ loguru_format: str = "".join(
+ [
+ "{time:YYYY-MM-DD HH:mm:ss.SSS} | ",
+ "{level: <5} | ",
+ "{message}",
+ ],
+ )
+
+ @classmethod
+ @validator("log_lvl", pre=True)
+ def assemble_log_lvl(cls, log_lvl: str) -> str: # pragma: no cover
+ """
+ Format and validate log lvl str.
+
+ Args:
+ log_lvl: str
+
+ Returns:
+ str:
+
+ Raises:
+ ValueError: if input string is invalid
+ """
+ upper_str = log_lvl.upper()
+ if isinstance(logging.getLevelName(upper_str), str):
+ raise ValueError(f"Incorrect log lvl variable {log_lvl}")
+
+ return upper_str
+
+
+def init_logging(log_conf: LoggingConfig = LoggingConfig()) -> None:
+ """
+ Replace logging handlers with a custom handler.
+
+ This function should be called at application startup in the beginning.
+ Example:
+ >>> from sekoia_automation.loguru.config import init_logging
+ >>> if __name__ == "__main__":
+ >>> init_logging()
+ >>> # Other part of application
+ >>> from loguru import logger
+ >>> ...
+ >>> logger.info("Log message formatted {one} {two}", one="First", two="Second")
+
+ Args:
+ log_conf: LoggingConfig
+
+ Returns:
+ None:
+ """
+ logging.root.handlers = [InterceptHandler()]
+ logging.root.setLevel(log_conf.log_lvl)
+
+ for name in logging.root.manager.loggerDict.keys():
+ logging.getLogger(name).handlers = []
+ logging.getLogger(name).propagate = True
+
+ logger.configure(
+ handlers=[
+ {
+ "sink": sys.stdout,
+ "serialize": log_conf.json_logs,
+ "format": lambda values: format_record(
+ values,
+ log_conf.loguru_format,
+ ),
+ },
+ ],
+ )
diff --git a/sekoia_automation/loguru/formatters.py b/sekoia_automation/loguru/formatters.py
new file mode 100644
index 0000000..4bd14f5
--- /dev/null
+++ b/sekoia_automation/loguru/formatters.py
@@ -0,0 +1,39 @@
+"""Set of formatter functions."""
+
+from pprint import pformat
+from typing import Any
+
+
+def format_record(record: dict[Any, Any], loguru_format: str) -> str:
+ """
+ Format loguru_format based on record.
+
+ Use pformat for log any data like request/response body during debug.
+ Work with logging if loguru handle it.
+ Example:
+ >>> payload = [{"users":[{"name": "Nick", "age": 87, "is_active": True}]
+ >>> logger.bind(payload=).debug("users payload")
+ >>> [
+ >>> {
+ >>> 'count': 1,
+ >>> 'users': [ {'age': 87, 'is_active': True, 'name': 'Nick'} ]
+ >>> }
+ >>> ]
+
+ Args:
+ record: dict[any, any]
+ loguru_format: str
+
+ Returns:
+ str: logging string
+ """
+ format_str = loguru_format
+ if record["extra"].get("payload") is not None:
+ record["extra"]["payload"] = pformat(
+ record["extra"]["payload"],
+ indent=4,
+ compact=True,
+ )
+ format_str = "".join([loguru_format, "\n{extra[payload]}"])
+
+ return "".join([format_str, "{exception}\n"])
diff --git a/sekoia_automation/loguru/handlers.py b/sekoia_automation/loguru/handlers.py
new file mode 100644
index 0000000..2dc8f72
--- /dev/null
+++ b/sekoia_automation/loguru/handlers.py
@@ -0,0 +1,42 @@
+"""Base InterceptHandler class."""
+
+import logging
+
+from loguru import logger
+
+
+class InterceptHandler(logging.Handler):
+ """
+ Default handler.
+
+ See docs https://loguru.readthedocs.io/en/stable/overview.html#entirely-compatible-with-standard-logging
+ """
+
+ def emit(self, record: logging.LogRecord) -> None:
+ """
+ Get corresponding Loguru level if it exists.
+
+ Args:
+ record: LogRecord
+
+ Raises:
+ ValueError: in case if any errors during logging raises
+ """
+ try:
+ level = logger.level(record.levelname).name
+ except ValueError:
+ level = record.levelname
+
+ # Find caller from where originated the logged message
+ frame, depth = logging.currentframe(), 2
+ while frame.f_code.co_filename == logging.__file__:
+ if frame.f_back is None:
+ raise ValueError("f_back error while logger")
+
+ frame = frame.f_back
+ depth += 1
+
+ logger.opt(depth=depth, exception=record.exc_info).log(
+ level,
+ record.getMessage(),
+ )
diff --git a/sekoia_automation/trigger.py b/sekoia_automation/trigger.py
index 4a6e0bb..e768170 100644
--- a/sekoia_automation/trigger.py
+++ b/sekoia_automation/trigger.py
@@ -1,6 +1,7 @@
import json
import signal
from abc import abstractmethod
+from collections.abc import Generator
from contextlib import contextmanager
from datetime import datetime, timedelta
from functools import cached_property
@@ -77,12 +78,13 @@ def __init__(self, module: Module | None = None, data_path: Path | None = None):
retry_error_callback=capture_retry_error,
)
def _get_secrets_from_server(self) -> dict[str, Any]:
- """Calls the API to fetch this trigger's secrets
+ """
+ Calls the API to fetch this trigger's secrets.
- If self.module has no secrets configured, we don't do anything
+ If `self.module` has no secrets configured, we don't do anything.
- :return: A dict mapping the configuration's secrets to their value
- :rtype: dict[str, Any]
+ Returns:
+ dict[str, Any]:
"""
secrets = {}
if self.module.has_secrets():
@@ -98,11 +100,10 @@ def _get_secrets_from_server(self) -> dict[str, Any]:
self._log_request_error(exception)
return secrets
- def stop(self, *args, **kwargs): # noqa: ARG002
+ def stop(self, *args, **kwargs) -> None: # noqa: ARG002
"""
Engage the trigger exit
"""
- # Exit signal received, asking the processor to stop
self._stop_event.set()
@property
@@ -123,6 +124,12 @@ def configuration(self) -> dict | BaseModel | None:
@configuration.setter
def configuration(self, configuration: dict) -> None:
+ """
+ Set the trigger configuration.
+
+ Args:
+ configuration: dict
+ """
try:
self._configuration = get_as_model(
get_annotation_for(self.__class__, "configuration"), configuration
@@ -175,10 +182,12 @@ def execute(self) -> None:
# Send remaining logs if any
self._send_logs_to_api()
- def _rm_tree(self, path: Path):
- """Delete a directory and its children.
+ def _rm_tree(self, path: Path) -> None:
+ """
+ Delete a directory and its children.
- :param Path path: The directory to delete
+ Args:
+ path: Path: The directory to delete
"""
# iter over children
for child in path.iterdir():
@@ -194,8 +203,22 @@ def _rm_tree(self, path: Path):
path.rmdir()
@contextmanager
- def _ensure_directory(self, directory: str | None, remove_directory: bool = False):
- """Make sure the directory exists."""
+ def _ensure_directory(
+ self, directory: str | None = None, remove_directory: bool = False
+ ) -> Generator[str | None, None, None]:
+ """
+ Make sure the directory exists.
+
+ Args:
+ directory: str | None
+ remove_directory: bool
+
+ Raises:
+ InvalidDirectoryError: If the directory doesn't exist
+
+ Returns:
+ Generator[str | None, None, None]:
+ """
if directory:
# This will work for both relative and absolute path
directory_path = self.data_path.joinpath(directory)
@@ -218,8 +241,16 @@ def send_normalized_event(
event: dict,
directory: str | None = None,
remove_directory: bool = False,
- ):
- """Send a normalized event to Sekoia.io so that it triggers a playbook run."""
+ ) -> None:
+ """
+ Send a normalized event to Sekoia.io so that it triggers a playbook run.
+
+ Args:
+ event_name: str
+ event: dict
+ directory: str | None
+ remove_directory: bool
+ """
# Reset the consecutive error count
self._error_count = 0
self._last_events_time = datetime.utcnow()
@@ -237,10 +268,17 @@ def send_event(
event: dict,
directory: str | None = None,
remove_directory: bool = False,
- ):
- """Send an event to Sekoia.io so that it triggers playbook runs.
+ ) -> None:
+ """
+ Send an event to Sekoia.io so that it triggers playbook runs.
Makes sure `results_model` is used to validate/coerce the event if present
+
+ Args:
+ event_name: str
+ event: dict
+ directory: str | None
+ remove_directory: bool
"""
return self.send_normalized_event(
event_name,
@@ -302,7 +340,8 @@ def _send_logs_to_api(self):
@abstractmethod
def run(self) -> None:
- """Method that each trigger should implement to contain its logic.
+ """
+ Method that each trigger should implement to contain its logic.
Should usually be an infinite loop, calling send_event when relevant.
"""
@@ -357,9 +396,7 @@ def is_alive(self) -> bool:
return False
def liveness_context(self) -> dict:
- """
- Context returned when the health endpoint is requested
- """
+ """Context returned when the health endpoint is requested."""
return {
"last_events_time": self._last_events_time.isoformat(),
"seconds_without_events_threshold": self.seconds_without_events,
diff --git a/tests/aio/__init__.py b/tests/aio/__init__.py
new file mode 100644
index 0000000..7693a37
--- /dev/null
+++ b/tests/aio/__init__.py
@@ -0,0 +1 @@
+"""Tests for sekoia_automation.aio.helpers module"""
diff --git a/tests/aio/helpers/__init__.py b/tests/aio/helpers/__init__.py
new file mode 100644
index 0000000..7c1b570
--- /dev/null
+++ b/tests/aio/helpers/__init__.py
@@ -0,0 +1 @@
+"""Tests for sekoia_automation.helpers module."""
diff --git a/tests/aio/helpers/http/__init__.py b/tests/aio/helpers/http/__init__.py
new file mode 100644
index 0000000..7d7fc9f
--- /dev/null
+++ b/tests/aio/helpers/http/__init__.py
@@ -0,0 +1 @@
+"""Tests for `sekoia_automation.aio.helpers.http` module."""
diff --git a/tests/aio/helpers/http/test_http_client_session.py b/tests/aio/helpers/http/test_http_client_session.py
new file mode 100644
index 0000000..b2dc26f
--- /dev/null
+++ b/tests/aio/helpers/http/test_http_client_session.py
@@ -0,0 +1,199 @@
+"""Tests for sekoia_automation.aio.helpers.http.http_client."""
+import time
+
+import pytest
+from aioresponses import aioresponses
+from pydantic import BaseModel
+
+from sekoia_automation.aio.helpers.http.http_client import HttpClient
+from sekoia_automation.aio.helpers.http.token_refresher import (
+ GenericTokenRefresher,
+ RefreshedToken,
+)
+
+
+class TokenResponse(BaseModel):
+ """Test implementation of token response."""
+
+ access_token: str
+
+
+class TokenRefresher(GenericTokenRefresher):
+ """Test implementation of GenericTokenRefresher."""
+
+ async def get_token(self) -> RefreshedToken[TokenResponse]:
+ """
+ Test implementation of get_token.
+
+ Returns:
+ RefreshedToken[TokenResponse]:
+ """
+ async with self.session().post(url=self.auth_url, json={}) as response:
+ response_data = await response.json()
+
+ return RefreshedToken(
+ token=TokenResponse(**response_data),
+ created_at=int(time.time()),
+ ttl=3600,
+ )
+
+ def __init__(self, client_id: str, client_secret: str, auth_url: str):
+ """Initialize TokenRefresher."""
+ super().__init__()
+ self.client_id = client_id
+ self.client_secret = client_secret
+ self.auth_url = auth_url
+
+
+class CustomHttpClient(HttpClient):
+ """Complete test implementation of HttpClient with TokenRefresher."""
+
+ def __init__(
+ self,
+ client_id: str,
+ client_secret: str,
+ auth_url: str,
+ base_url: str,
+ ) -> None:
+ """Initialize CustomHttpClient."""
+ super().__init__()
+ self.base_url = base_url
+ self.token_refresher = TokenRefresher(
+ client_id=client_id,
+ client_secret=client_secret,
+ auth_url=auth_url,
+ )
+
+ async def get_test_data(self, url: str) -> dict[str, str]:
+ """
+ Test method to get data from server with authentication.
+
+ Args:
+ url: str
+
+ Returns:
+ dict[str, str]:
+ """
+ async with self.token_refresher.with_access_token() as access_token:
+ async with self.session() as session:
+ async with session.get(
+ url=url,
+ headers={
+ "Authorization": f"Bearer {access_token.token.access_token}"
+ },
+ ) as response:
+ return await response.json()
+
+
+@pytest.fixture
+def auth_url(session_faker):
+ """
+ Fixture to initialize auth_url.
+
+ Returns:
+ str:
+ """
+ return session_faker.uri()
+
+
+@pytest.fixture
+def base_url(session_faker):
+ """
+ Fixture to initialize base_url.
+
+ Returns:
+ str:
+ """
+ return session_faker.uri()
+
+
+@pytest.fixture
+def http_client(session_faker, auth_url, base_url):
+ """
+ Fixture to initialize HttpClient.
+
+ Returns:
+ CustomHttpClient:
+ """
+ return CustomHttpClient(
+ client_id=session_faker.word(),
+ client_secret=session_faker.word(),
+ auth_url=auth_url,
+ base_url=base_url,
+ )
+
+
+@pytest.mark.asyncio
+async def test_http_client_get_data(session_faker, http_client, base_url, auth_url):
+ """
+ Test http_client get data.
+
+ Args:
+ session_faker: Faker
+ http_client: CustomHttpClient
+ base_url: str
+ auth_url: str
+ """
+ token_response = TokenResponse(access_token=session_faker.word())
+
+ get_test_data_response = {
+ session_faker.word(): session_faker.word(),
+ session_faker.word(): session_faker.word(),
+ }
+
+ with aioresponses() as mocked_responses:
+ mocked_responses.post(auth_url, payload=token_response.dict())
+ mocked_responses.get(f"{base_url}/test", payload=get_test_data_response)
+
+ test_data = await http_client.get_test_data(url=f"{base_url}/test")
+
+ assert test_data == get_test_data_response
+
+ await http_client.token_refresher.close()
+ await http_client.token_refresher._session.close()
+ await http_client._session.close()
+
+
+@pytest.mark.asyncio
+async def test_http_client_get_data_async_limiter(
+ session_faker,
+ http_client,
+ base_url,
+ auth_url,
+):
+ """
+ Test http_client get data with async_limiter.
+
+ Args:
+ session_faker: Faker
+ http_client: CustomHttpClient
+ base_url: str
+ auth_url: str
+ """
+ token_response = TokenResponse(access_token=session_faker.word())
+
+ # 1 request per 3 seconds
+ http_client.set_rate_limit(1, 3)
+
+ get_test_data_response = {
+ session_faker.word(): session_faker.word(),
+ session_faker.word(): session_faker.word(),
+ }
+
+ with aioresponses() as mocked_responses:
+ start_query_time = time.time()
+ mocked_responses.post(auth_url, payload=token_response.dict())
+ mocked_responses.get(f"{base_url}/test", payload=get_test_data_response)
+ await http_client.get_test_data(url=f"{base_url}/test")
+
+ mocked_responses.post(auth_url, payload=token_response.dict())
+ mocked_responses.get(f"{base_url}/test", payload=get_test_data_response)
+ await http_client.get_test_data(url=f"{base_url}/test")
+
+ end_query_time = time.time()
+
+ assert int(end_query_time - start_query_time) == 3
+
+ await http_client.token_refresher.close()
+ await http_client.token_refresher._session.close()
+ await http_client._session.close()
diff --git a/tests/aio/helpers/http/test_http_token_refresher.py b/tests/aio/helpers/http/test_http_token_refresher.py
new file mode 100644
index 0000000..7289ec7
--- /dev/null
+++ b/tests/aio/helpers/http/test_http_token_refresher.py
@@ -0,0 +1,240 @@
+"""Tests for `sekoia_automation.aio.helpers.http.token_refresher`."""
+import asyncio
+import time
+from asyncio import Lock
+from typing import ClassVar
+
+import pytest
+from aioresponses import aioresponses
+from pydantic import BaseModel
+
+from sekoia_automation.aio.helpers.http.token_refresher import (
+ GenericTokenRefresher,
+ RefreshedToken,
+)
+
+
+class CustomTokenResponse(BaseModel):
+ """Test implementation of token response."""
+
+ access_token: str
+ signature: str
+ random_field: str
+
+
+class CustomTokenRefresher(GenericTokenRefresher):
+ """
+ Test implementation of GenericTokenRefresher.
+
+ Contains some additional feature like default ttl and singleton impl.
+ """
+
+ _instances: ClassVar[dict[str, "CustomTokenRefresher"]] = {}
+ _locks: ClassVar[dict[str, Lock]] = {}
+
+ def __init__(
+ self,
+ client_id: str,
+ client_secret: str,
+ auth_url: str,
+ ttl: int = 60,
+ ) -> None:
+ """
+ Initialize CustomTokenRefresher.
+
+ Args:
+ client_id: str
+ client_secret: str
+ auth_url: str
+ ttl: int
+ """
+ super().__init__()
+ self.client_id = client_id
+ self.client_secret = client_secret
+ self.auth_url = auth_url
+ self.ttl = ttl
+
+ @classmethod
+ async def get_instance(
+ cls,
+ client_id: str,
+ client_secret: str,
+ auth_url: str,
+ ) -> "CustomTokenRefresher":
+ """
+ Get instance of CustomTokenRefresher.
+
+ Totally safe to use in async environment. Use lock to prevent multiple
+ instances creation. Get instance from cls._instances if it already exists
+ based on client_id, client_secret and auth_url.
+
+ Args:
+ client_id: str
+ client_secret: str
+ auth_url: str
+
+ Returns:
+ CustomTokenRefresher:
+ """
+ refresher_unique_key = str((client_id, client_secret, auth_url))
+ if not cls._locks.get(refresher_unique_key):
+ cls._locks[refresher_unique_key] = asyncio.Lock()
+
+ if not cls._instances.get(refresher_unique_key):
+ async with cls._locks[refresher_unique_key]:
+ if not cls._instances.get(refresher_unique_key):
+ cls._instances[refresher_unique_key] = CustomTokenRefresher(
+ client_id,
+ client_secret,
+ auth_url,
+ )
+
+ return cls._instances[refresher_unique_key]
+
+ async def get_token(self) -> RefreshedToken[CustomTokenResponse]:
+ """
+ Get token from server test implementation.
+
+ Returns:
+ RefreshedToken[CustomTokenResponse]:
+ """
+
+ async with self.session().post(url=self.auth_url, json={}) as response:
+ response_data = await response.json()
+ ttl = self.ttl
+ if (response_data.get("expires_in") or 0) > 0:
+ ttl = int(response_data.get("expires_in"))
+
+ return RefreshedToken(
+ token=CustomTokenResponse(**response_data),
+ created_at=int(time.time()),
+ ttl=ttl,
+ )
+
+
+@pytest.mark.asyncio
+async def test_token_refresher_1(session_faker):
+ """
+ Test token refresher with ttl from class.
+
+ Args:
+ session_faker: Faker
+ """
+ token_response = CustomTokenResponse(
+ access_token=session_faker.word(),
+ signature=session_faker.word(),
+ random_field=session_faker.word(),
+ )
+
+ client_id = session_faker.word()
+ client_secret = session_faker.word()
+ auth_url = session_faker.uri()
+
+ token_refresher = await CustomTokenRefresher.get_instance(
+ client_id,
+ client_secret,
+ auth_url,
+ )
+
+ assert token_refresher == await CustomTokenRefresher.get_instance(
+ client_id,
+ client_secret,
+ auth_url,
+ )
+
+ with aioresponses() as mocked_responses:
+ mocked_responses.post(auth_url, payload=token_response.dict())
+ await token_refresher._refresh_token()
+
+ assert token_refresher._token is not None
+ assert token_refresher._token.token.access_token == token_response.access_token
+ assert token_refresher._token.token.signature == token_response.signature
+ assert token_refresher._token.token.random_field == token_response.random_field
+ assert token_refresher._token.ttl == 60
+
+ await token_refresher._session.close()
+ await token_refresher.close()
+
+
+@pytest.mark.asyncio
+async def test_token_refresher_2(session_faker):
+ """
+ Test token refresher with ttl from server response.
+
+ Args:
+ session_faker: Faker
+ """
+ token_response = {
+ "access_token": session_faker.word(),
+ "signature": session_faker.word(),
+ "random_field": session_faker.word(),
+ "expires_in": session_faker.pyint(),
+ }
+
+ client_id = session_faker.word()
+ client_secret = session_faker.word()
+ auth_url = session_faker.uri()
+
+ with aioresponses() as mocked_responses:
+ token_refresher = CustomTokenRefresher(
+ client_id,
+ client_secret,
+ auth_url,
+ )
+
+ mocked_responses.post(auth_url, payload=token_response)
+ await token_refresher._refresh_token()
+
+ assert token_refresher._token is not None
+ assert token_refresher._token.token.access_token == token_response.get(
+ "access_token"
+ )
+ assert token_refresher._token.token.signature == token_response.get("signature")
+ assert token_refresher._token.token.random_field == token_response.get(
+ "random_field"
+ )
+ assert token_refresher._token.ttl == token_response.get("expires_in")
+
+ await token_refresher._session.close()
+ await token_refresher.close()
+
+
+@pytest.mark.asyncio
+async def test_token_refresher_with_token(session_faker):
+ """
+    Test token refresher via the with_access_token context manager.
+
+ Args:
+ session_faker: Faker
+ """
+ token_response = {
+ "access_token": session_faker.word(),
+ "signature": session_faker.word(),
+ "random_field": session_faker.word(),
+ "expires_in": session_faker.pyint(),
+ }
+
+ client_id = session_faker.word()
+ client_secret = session_faker.word()
+ auth_url = session_faker.uri()
+
+ with aioresponses() as mocked_responses:
+ token_refresher = CustomTokenRefresher(
+ client_id,
+ client_secret,
+ auth_url,
+ )
+
+ mocked_responses.post(auth_url, payload=token_response)
+ async with token_refresher.with_access_token() as generated_token:
+ assert generated_token.token.access_token == token_response.get(
+ "access_token"
+ )
+ assert generated_token.token.signature == token_response.get("signature")
+ assert generated_token.token.random_field == token_response.get(
+ "random_field"
+ )
+ assert generated_token.ttl == token_response.get("expires_in")
+
+ await token_refresher._session.close()
+ await token_refresher.close()
diff --git a/tests/aio/helpers/http/test_http_utils.py b/tests/aio/helpers/http/test_http_utils.py
new file mode 100644
index 0000000..3f811c7
--- /dev/null
+++ b/tests/aio/helpers/http/test_http_utils.py
@@ -0,0 +1,50 @@
+"""Tests for `sekoia_automation.aio.helpers.http.utils`."""
+import os
+
+import aiofiles
+import pytest
+from aiohttp import ClientSession
+from aioresponses import aioresponses
+
+from sekoia_automation.aio.helpers.files.utils import delete_file
+from sekoia_automation.aio.helpers.http.utils import save_aiohttp_response
+
+
+@pytest.mark.asyncio
+async def test_save_response_to_temporary_file(tmp_path, session_faker):
+ """
+ Test save response to file.
+
+ Args:
+ tmp_path: Path
+ session_faker: Faker
+ """
+ data = session_faker.json(
+ data_columns={"test": ["name", "name", "name"]},
+ num_rows=1000,
+ )
+ with aioresponses() as mocked_responses:
+ url = session_faker.uri()
+ mocked_responses.get(
+ url=url,
+ status=200,
+ body=data,
+ headers={"Content-Length": f"{len(data)}"},
+ )
+
+ async with ClientSession() as session:
+ async with session.get(url) as response:
+ file_path = await save_aiohttp_response(
+ response, temp_dir=str(tmp_path)
+ )
+
+ assert os.path.exists(file_path)
+
+ async with aiofiles.open(file_path, encoding="utf-8") as file:
+ content = await file.read()
+
+ assert content == data
+
+ await delete_file(file_path)
+
+ assert not os.path.exists(file_path)
diff --git a/tests/aio/helpers/test_aws.py b/tests/aio/helpers/test_aws.py
new file mode 100644
index 0000000..d0a6b2d
--- /dev/null
+++ b/tests/aio/helpers/test_aws.py
@@ -0,0 +1,63 @@
+"""Tests related to `sekoia_automation.aio.helpers.aws` module."""
+
+import pytest
+from aiobotocore.session import AioSession
+
+from sekoia_automation.aio.helpers.aws.client import AwsClient, AwsConfiguration
+
+
+@pytest.fixture
+def aws_configuration():
+ """
+ Fixture for AwsConfiguration.
+
+ Returns:
+ AwsConfiguration:
+ """
+ return AwsConfiguration(
+ aws_access_key_id="ACCESS_KEY",
+ aws_secret_access_key="SECRET_KEY",
+ aws_region="us-east-1",
+ )
+
+
+@pytest.mark.asyncio
+async def test_aws_client_init(aws_configuration):
+ """
+ Test AwsClient initialization.
+
+ Args:
+ aws_configuration: AwsConfiguration
+ """
+ client = AwsClient(aws_configuration)
+
+ assert client._configuration == aws_configuration
+
+
+@pytest.mark.asyncio
+async def test_aws_client_get_session(aws_configuration):
+ """
+ Test AwsClient get_session.
+
+ Args:
+ aws_configuration: AwsConfiguration
+ """
+ client = AwsClient(aws_configuration)
+
+ session = client.get_session
+
+ assert isinstance(session, AioSession)
+
+ assert (
+ session.get_component("credential_provider").get_provider(
+ "_sekoia_credentials_provider"
+ )
+ == client._credentials_provider
+ )
+
+ assert (
+ session.get_component("credential_provider")._get_provider_offset(
+ "_sekoia_credentials_provider"
+ )
+ == 0
+ )
diff --git a/tests/aio/helpers/test_file_utils.py b/tests/aio/helpers/test_file_utils.py
new file mode 100644
index 0000000..4396bed
--- /dev/null
+++ b/tests/aio/helpers/test_file_utils.py
@@ -0,0 +1,88 @@
+"""Tests for the `sekoia_automation.aio.helpers.files` module."""
+
+import csv
+import os
+
+import pytest
+
+from sekoia_automation.aio.helpers.files.csv import csv_file_as_rows
+from sekoia_automation.aio.helpers.files.utils import delete_file
+
+
+@pytest.fixture
+def csv_content(session_faker) -> str:
+ """
+ Generate csv content.
+
+ Args:
+ session_faker: Faker
+ """
+ number_of_columns = session_faker.random.randint(1, 10)
+ number_of_rows = session_faker.random.randint(1, 50)
+
+ columns = [session_faker.word().upper() for _ in range(number_of_columns)]
+ rows = [
+ ",".join([session_faker.word() for _ in range(number_of_columns)])
+ for _ in range(number_of_rows)
+ ]
+
+ return "\n".join([",".join(columns), *rows])
+
+
+@pytest.fixture
+def random_text(session_faker) -> str:
+ """
+ Fixture for random text.
+
+ Args:
+ session_faker: Faker
+ """
+ nb_sentences = session_faker.pyint(min_value=2, max_value=10)
+
+ return session_faker.paragraph(nb_sentences=nb_sentences)
+
+
+@pytest.mark.asyncio
+async def test_delete_file(tmp_path, session_faker, random_text):
+ """
+ Test delete_file.
+
+ Args:
+ tmp_path: Path
+ session_faker: Faker
+ random_text: str
+ """
+ file_path = os.path.join(tmp_path, session_faker.word())
+ with open(file_path, "w+") as file:
+ file.write(random_text)
+
+ assert os.path.exists(file_path)
+
+ await delete_file(file_path)
+
+ assert not os.path.exists(file_path)
+
+
+@pytest.mark.asyncio
+async def test_csv_file_content(tmp_path, session_faker, csv_content):
+ """
+ Test read file content as csv.
+
+ Args:
+ tmp_path: Path
+ session_faker: Faker
+ csv_content: str
+ """
+ file_path = os.path.join(tmp_path, session_faker.word())
+ with open(file_path, "w+") as file:
+ file.write(csv_content)
+
+ result = []
+ async for row in csv_file_as_rows(file_path):
+ result.append(row)
+
+ assert result == list(csv.DictReader(csv_content.splitlines(), delimiter=","))
+
+ await delete_file(file_path)
+
+ assert not os.path.exists(file_path)
diff --git a/tests/aio/test_connector.py b/tests/aio/test_connector.py
new file mode 100644
index 0000000..1d2aa77
--- /dev/null
+++ b/tests/aio/test_connector.py
@@ -0,0 +1,220 @@
+"""Test async connector."""
+from unittest.mock import Mock, patch
+from urllib.parse import urljoin
+
+import pytest
+from aiolimiter import AsyncLimiter
+from aioresponses import aioresponses
+from faker import Faker
+from tenacity import Retrying, stop_after_attempt
+
+from sekoia_automation.aio.connector import AsyncConnector
+
+
+class DummyAsyncConnector(AsyncConnector):
+ trigger_activation: str | None = None
+
+ def run(self):
+ raise NotImplementedError
+
+
+@pytest.fixture
+def async_connector(storage, mocked_trigger_logs, faker: Faker):
+ with patch("sentry_sdk.set_tag"):
+ async_connector = DummyAsyncConnector(data_path=storage)
+
+ async_connector.trigger_activation = "2022-03-14T11:16:14.236930Z"
+ async_connector.configuration = {
+ "intake_key": "",
+ "intake_server": faker.uri(),
+ }
+
+ async_connector.log = Mock()
+ async_connector.log_exception = Mock()
+
+ yield async_connector
+
+ async_connector.stop()
+
+
+@pytest.mark.asyncio
+async def test_async_connector_rate_limiter(async_connector: DummyAsyncConnector):
+ """
+ Test async connector rate limiter.
+
+ Args:
+ async_connector: DummyAsyncConnector
+ """
+ other_instance = DummyAsyncConnector()
+ rate_limiter_mock = AsyncLimiter(max_rate=100)
+
+ assert async_connector._rate_limiter is None
+ assert other_instance._rate_limiter is None
+
+ assert async_connector.get_rate_limiter() == other_instance.get_rate_limiter()
+
+ async_connector.set_rate_limiter(rate_limiter_mock)
+
+ assert async_connector.get_rate_limiter() == other_instance.get_rate_limiter()
+ assert async_connector._rate_limiter == rate_limiter_mock
+
+ DummyAsyncConnector.set_rate_limiter(None)
+ DummyAsyncConnector.set_client_session(None)
+
+
+@pytest.mark.asyncio
+async def test_async_connector_client_session(async_connector: DummyAsyncConnector):
+ """
+ Test async connector client_session.
+
+ Args:
+ async_connector: DummyAsyncConnector
+ """
+ other_instance = DummyAsyncConnector()
+
+ assert async_connector._session is None
+ assert other_instance._session is None
+
+ async with async_connector.session() as session_1:
+ async with other_instance.session() as session_2:
+ assert session_1 == session_2
+
+ assert async_connector._rate_limiter is not None and isinstance(
+ async_connector._rate_limiter, AsyncLimiter
+ )
+
+ assert other_instance._rate_limiter is not None and isinstance(
+ other_instance._rate_limiter, AsyncLimiter
+ )
+
+ DummyAsyncConnector.set_rate_limiter(None)
+ other_instance.set_client_session(None)
+
+
+@pytest.mark.asyncio
+async def test_async_connector_push_single_event(
+ async_connector: DummyAsyncConnector, faker: Faker
+):
+ """
+ Test async connector push events.
+
+ Args:
+ async_connector: DummyAsyncConnector
+ faker: Faker
+ """
+ events = [
+ faker.json(
+ data_columns={
+ "Spec": "@1.0.1",
+ "ID": "pyint",
+ "Details": {"Name": "name", "Address": "address"},
+ },
+ num_rows=1,
+ )
+ ]
+
+ single_event_id = faker.uuid4()
+
+ request_url = urljoin(async_connector.configuration.intake_server, "/batch")
+
+ with aioresponses() as mocked_responses:
+ mocked_responses.post(
+ request_url,
+ status=200,
+ payload={"received": True, "event_ids": [single_event_id]},
+ )
+
+ result = await async_connector.push_data_to_intakes(events)
+
+ assert result == [single_event_id]
+
+
+@pytest.mark.asyncio
+async def test_async_connector_push_multiple_events(
+ async_connector: DummyAsyncConnector, faker: Faker
+):
+ """
+ Test async connector push events.
+
+ Args:
+ async_connector: DummyAsyncConnector
+ faker: Faker
+ """
+ async_connector.configuration.chunk_size = 1
+
+ events = [
+ faker.json(
+ data_columns={
+ "Spec": "@1.0.1",
+ "ID": "pyint",
+ "Details": {"Name": "name", "Address": "address"},
+ },
+ num_rows=1,
+ )
+ for _ in range(100)
+ ]
+
+ single_event_id = faker.uuid4()
+
+ request_url = urljoin(async_connector.configuration.intake_server, "/batch")
+
+ with aioresponses() as mocked_responses:
+ for _ in range(100):
+ mocked_responses.post(
+ request_url,
+ status=200,
+ payload={"received": True, "event_ids": [single_event_id]},
+ )
+
+ result = await async_connector.push_data_to_intakes(events)
+
+ assert result == [single_event_id for _ in range(100)]
+
+
+@pytest.mark.asyncio
+async def test_async_connector_raise_error(
+ async_connector: DummyAsyncConnector, faker: Faker
+):
+ """
+ Test async connector push events.
+
+ Args:
+ async_connector: DummyAsyncConnector
+ faker: Faker
+ """
+ async_connector.configuration.chunk_size = 1
+
+ events = [
+ faker.json(
+ data_columns={
+ "Spec": "@1.0.1",
+ "ID": "pyint",
+ "Details": {"Name": "name", "Address": "address"},
+ },
+ num_rows=1,
+ )
+ ]
+
+ async_connector._retry = lambda: Retrying(
+ reraise=True,
+ stop=stop_after_attempt(1),
+ )
+
+ request_url = urljoin(async_connector.configuration.intake_server, "/batch")
+
+ with aioresponses() as mocked_responses:
+ for _ in range(2):
+ mocked_responses.post(
+ request_url,
+ status=400,
+ payload={"message_error": "custom message"},
+ )
+
+ expected_error = 'Chunk 0 error: {"message_error": "custom message"}'
+
+ try:
+ await async_connector.push_data_to_intakes(events)
+
+ except Exception as e:
+ assert isinstance(e, RuntimeError)
+ assert str(e) == expected_error
diff --git a/tests/conftest.py b/tests/conftest.py
index 4f2fd8e..bb0cc18 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,3 +1,5 @@
+import asyncio
+import random
from pathlib import Path
from shutil import rmtree
from tempfile import mkdtemp
@@ -5,6 +7,7 @@
import pytest
import requests_mock
+from faker import Faker
from sekoia_automation import config
from sekoia_automation import storage as storage_module
@@ -71,3 +74,59 @@ def mocked_trigger_logs():
mock.post("http://sekoia-playbooks/logs")
yield mock
+
+
+@pytest.fixture(scope="session")
+def event_loop():
+ """
+ Create event loop for pytest.mark.asyncio.
+
+ Yields:
+ loop:
+ """
+ policy = asyncio.get_event_loop_policy()
+ loop = policy.new_event_loop()
+
+ yield loop
+
+ loop.close()
+
+
+@pytest.fixture(scope="session")
+def faker_locale() -> list[str]:
+ """
+ Configure Faker to use correct locale.
+
+ Returns:
+ List[str]:
+ """
+ return ["en"]
+
+
+@pytest.fixture(scope="session")
+def faker_seed() -> int:
+ """
+ Configure Faker to use correct seed.
+
+ Returns:
+ int:
+ """
+ return random.randint(1, 10000)
+
+
+@pytest.fixture(scope="session")
+def session_faker(faker_locale: list[str], faker_seed: int) -> Faker:
+ """
+ Configure session lvl Faker to use correct seed and locale.
+
+ Args:
+ faker_locale: List[str]
+ faker_seed: int
+
+ Returns:
+ Faker:
+ """
+ instance = Faker(locale=faker_locale)
+ instance.seed_instance(seed=faker_seed)
+
+ return instance
diff --git a/tests/loguru/__init__.py b/tests/loguru/__init__.py
new file mode 100644
index 0000000..6cdf751
--- /dev/null
+++ b/tests/loguru/__init__.py
@@ -0,0 +1 @@
+"""Tests for sekoia_automation.loguru module."""
diff --git a/tests/loguru/test_loguru_config.py b/tests/loguru/test_loguru_config.py
new file mode 100644
index 0000000..14f1177
--- /dev/null
+++ b/tests/loguru/test_loguru_config.py
@@ -0,0 +1,41 @@
+"""Config tests."""
+
+import pytest
+from pydantic import ValidationError
+
+from sekoia_automation.loguru.config import LoggingConfig
+
+
+@pytest.mark.asyncio
+async def test_config_default_values():
+ """Test config default init."""
+ config = LoggingConfig()
+ expected = (
+ "{time:YYYY-MM-DD HH:mm:ss.SSS}"
+ + " | {level: <5} | {message}"
+ )
+
+ assert config.log_lvl == "INFO"
+ assert config.log_file == "logs/{time:YYYY-MM-DD}.log"
+ assert config.log_rotation == "00:00"
+ assert config.log_retention == "1 month"
+ assert config.log_compression == "zip"
+ assert config.log_queue
+ assert config.json_logs is False
+ assert config.loguru_format == expected
+
+
+@pytest.mark.asyncio
+async def test_config_assemble_log_lvl():
+ """Test config assemble log lvl."""
+ valid_log_lvl = "debug"
+ config = LoggingConfig(log_lvl=valid_log_lvl)
+ assert config.log_lvl.upper() == valid_log_lvl.upper()
+
+ invalid_log_lvl = "invalid"
+
+ try:
+ LoggingConfig(log_lvl=invalid_log_lvl)
+
+ except ValidationError as e:
+ assert "Incorrect log lvl variable" in str(e)
diff --git a/tests/loguru/test_loguru_formatters.py b/tests/loguru/test_loguru_formatters.py
new file mode 100644
index 0000000..7447419
--- /dev/null
+++ b/tests/loguru/test_loguru_formatters.py
@@ -0,0 +1,52 @@
+"""Formatters tests."""
+import pytest
+
+from sekoia_automation.loguru.formatters import format_record
+
+
+@pytest.mark.asyncio
+async def test_formatted_record_non_empty(session_faker):
+ """
+ Test format record to correct string.
+
+ Args:
+ session_faker: Faker
+ """
+ log_str = session_faker.sentence(nb_words=10)
+
+ str_format = "".join(
+ [
+ "{time:YYYY-MM-DD HH:mm:ss.SSS} | ",
+ "{level: <5} | ",
+ "{name}:{function}:{line} - ",
+ "{message}",
+ ],
+ )
+
+ expected_result = "".join(
+ [str_format, "\n{extra[payload]}{exception}\n"],
+ )
+
+ assert expected_result == format_record(
+ {"extra": {"payload": {"data": log_str}}},
+ str_format,
+ )
+
+
+@pytest.mark.asyncio()
+async def test_formatted_record_empty():
+ """Test format record to correct string if payload is empty."""
+ str_format = "".join(
+ [
+ "{time:YYYY-MM-DD HH:mm:ss.SSS} | ",
+ "{level: <5} | ",
+ "{name}:{function}:{line} - ",
+ "{message}",
+ ],
+ )
+
+ expected_result = "".join(
+ [str_format, "{exception}\n"],
+ )
+
+ assert expected_result == format_record({"extra": {}}, str_format)
diff --git a/tests/loguru/test_loguru_handler.py b/tests/loguru/test_loguru_handler.py
new file mode 100644
index 0000000..3972959
--- /dev/null
+++ b/tests/loguru/test_loguru_handler.py
@@ -0,0 +1,45 @@
+"""Tests related to logging."""
+import logging
+from logging import LogRecord
+
+import pytest
+
+from sekoia_automation.loguru.config import init_logging
+from sekoia_automation.loguru.handlers import InterceptHandler
+
+
+@pytest.fixture
+def logger_handler() -> InterceptHandler:
+ """
+ InterceptHandler fixture.
+ Returns:
+ InterceptHandler:
+ """
+ return InterceptHandler()
+
+
+@pytest.mark.asyncio
+async def test_logging_emit_with_existing_loguru_level(logger_handler):
+ """
+ Test logging emit with existing loguru level.
+ Args:
+ logger_handler: InterceptHandler
+ """
+ record = LogRecord("name", 30, "pathname", 10, "message", (), None)
+ logger_handler.emit(record)
+
+ try:
+ record1 = LogRecord("name", 100500, "pathname", 10, "message", (), None)
+ logger_handler.emit(record1)
+ except ValueError as e:
+ assert str(e) == "Level 'Level 100500' does not exist"
+
+
+@pytest.mark.asyncio
+async def test_logging_log_message():
+ """
+    Test that init_logging installs handlers on the root logger.
+ """
+ init_logging()
+
+ assert logging.root.handlers != []