From 9413a987d950057e139fb6bcedf1d34a4f183f99 Mon Sep 17 00:00:00 2001 From: Aleksandr Kuznetsov Date: Sat, 2 Nov 2024 15:38:35 +0500 Subject: [PATCH] Initial project (#1) Add base metrics code, tests, package boilerplate, github checks --- .github/workflows/python-linters.yml | 32 ++ .github/workflows/python-tests.yml | 27 ++ .gitignore | 137 +++++++ LICENSE | 21 ++ README.md | 55 +++ pdm.lock | 501 ++++++++++++++++++++++++++ pyproject.toml | 144 ++++++++ src/huntflow_base_metrics/__init__.py | 23 ++ src/huntflow_base_metrics/_context.py | 23 ++ src/huntflow_base_metrics/base.py | 199 ++++++++++ src/huntflow_base_metrics/export.py | 68 ++++ src/huntflow_base_metrics/fastapi.py | 135 +++++++ tests/__init__.py | 0 tests/conftest.py | 9 + tests/test_base.py | 29 ++ tests/test_export.py | 57 +++ tests/test_fastapi_metrics.py | 218 +++++++++++ tests/test_observe_metrics.py | 89 +++++ 18 files changed, 1767 insertions(+) create mode 100644 .github/workflows/python-linters.yml create mode 100644 .github/workflows/python-tests.yml create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 pdm.lock create mode 100644 pyproject.toml create mode 100644 src/huntflow_base_metrics/__init__.py create mode 100644 src/huntflow_base_metrics/_context.py create mode 100644 src/huntflow_base_metrics/base.py create mode 100644 src/huntflow_base_metrics/export.py create mode 100644 src/huntflow_base_metrics/fastapi.py create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/test_base.py create mode 100644 tests/test_export.py create mode 100644 tests/test_fastapi_metrics.py create mode 100644 tests/test_observe_metrics.py diff --git a/.github/workflows/python-linters.yml b/.github/workflows/python-linters.yml new file mode 100644 index 0000000..89b1dde --- /dev/null +++ b/.github/workflows/python-linters.yml @@ -0,0 +1,32 @@ +name: Linters + +on: + push: + pull_request: + branches: [ master ] + +jobs: + build: + runs-on: 
ubuntu-latest + strategy: + matrix: + python-version: ['3.8.16'] + + steps: + - uses: actions/checkout@v4 + - name: Set up PDM + uses: pdm-project/setup-pdm@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: pdm sync -dG lint + + - name: Run lint + run: pdm run ruff check + + - name: Run format check + run: pdm run ruff format --check + + - name: Run mypy + run: pdm run mypy src diff --git a/.github/workflows/python-tests.yml b/.github/workflows/python-tests.yml new file mode 100644 index 0000000..44fed15 --- /dev/null +++ b/.github/workflows/python-tests.yml @@ -0,0 +1,27 @@ +name: Tests + +on: + push: + pull_request: + branches: [ master ] + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] + + steps: + - uses: actions/checkout@v4 + - name: Set up PDM + uses: pdm-project/setup-pdm@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: pdm sync -dG pytest + + - name: Run Tests + run: pdm run pytest + diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3255062 --- /dev/null +++ b/.gitignore @@ -0,0 +1,137 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +.ruff_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
+# IntelliJ IDEA
+from fastapi import FastAPI + +from huntflow_base_metrics import start_metrics, stop_metrics, add_middleware + + +# Service name (in most cases should be provided in `FACILITY_NAME` environment variable) +FACILITY_NAME = "my-service-name" +# Service instance name (should be provided in `FACILITY_ID` environment variable) +FACILITY_ID = "qwerty" + + +@asynccontextmanager +async def lifespan(app: FastAPI): + await onstartup(app) + yield + await onshutdown(app) + + +async def onstartup(app: FastAPI): + # do some startup actions + pass + +async def onshutdown(app: FastAPI): + # do some shutdown actions + stop_metrics() + + +def create_app(): + app = FastAPI(lifespan=lifespan)
+groups = ["default"] +files = [ + {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, + {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +requires_python = ">=3.8" +summary = "Reusable constraint types to use with typing.Annotated" +groups = ["default"] +dependencies = [ + "typing-extensions>=4.0.0; python_version < \"3.9\"", +] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.5.2" +requires_python = ">=3.8" +summary = "High level compatibility layer for multiple asynchronous event loop implementations" +groups = ["default", "pytest"] +dependencies = [ + "exceptiongroup>=1.0.2; python_version < \"3.11\"", + "idna>=2.8", + "sniffio>=1.1", + "typing-extensions>=4.1; python_version < \"3.11\"", +] +files = [ + {file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"}, + {file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"}, +] + +[[package]] +name = "certifi" +version = "2024.8.30" +requires_python = ">=3.6" +summary = "Python package for providing Mozilla's CA Bundle." 
+groups = ["pytest"] +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +summary = "Cross-platform colored terminal text." +groups = ["pytest"] +marker = "sys_platform == \"win32\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +requires_python = ">=3.7" +summary = "Backport of PEP 654 (exception groups)" +groups = ["default", "pytest"] +marker = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[[package]] +name = "fastapi" +version = "0.115.4" +requires_python = ">=3.8" +summary = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +groups = ["default"] +dependencies = [ + "pydantic!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0,>=1.7.4", + "starlette<0.42.0,>=0.40.0", + "typing-extensions>=4.8.0", +] +files = [ + {file = "fastapi-0.115.4-py3-none-any.whl", hash = "sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742"}, + {file = "fastapi-0.115.4.tar.gz", hash = "sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349"}, +] + +[[package]] +name = "h11" +version = "0.14.0" +requires_python = ">=3.7" +summary = "A pure-Python, 
bring-your-own-I/O implementation of HTTP/1.1" +groups = ["pytest"] +dependencies = [ + "typing-extensions; python_version < \"3.8\"", +] +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.6" +requires_python = ">=3.8" +summary = "A minimal low-level HTTP client." +groups = ["pytest"] +dependencies = [ + "certifi", + "h11<0.15,>=0.13", +] +files = [ + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, +] + +[[package]] +name = "httpx" +version = "0.27.2" +requires_python = ">=3.8" +summary = "The next generation HTTP client." +groups = ["pytest"] +dependencies = [ + "anyio", + "certifi", + "httpcore==1.*", + "idna", + "sniffio", +] +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[[package]] +name = "idna" +version = "3.10" +requires_python = ">=3.6" +summary = "Internationalized Domain Names in Applications (IDNA)" +groups = ["default", "pytest"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +requires_python = ">=3.7" +summary = "brain-dead simple config-ini parsing" +groups = ["pytest"] +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = 
"sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "mypy" +version = "1.13.0" +requires_python = ">=3.8" +summary = "Optional static typing for Python" +groups = ["lint"] +dependencies = [ + "mypy-extensions>=1.0.0", + "tomli>=1.1.0; python_version < \"3.11\"", + "typing-extensions>=4.6.0", +] +files = [ + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = 
"mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = 
"mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +requires_python = ">=3.5" +summary = "Type system extensions for programs checked with the mypy type checker." 
+groups = ["lint"] +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "packaging" +version = "24.1" +requires_python = ">=3.8" +summary = "Core utilities for Python packages" +groups = ["pytest"] +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +requires_python = ">=3.8" +summary = "plugin and hook calling mechanisms for python" +groups = ["pytest"] +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[[package]] +name = "prometheus-client" +version = "0.21.0" +requires_python = ">=3.8" +summary = "Python client for the Prometheus monitoring system." 
+groups = ["default"] +files = [ + {file = "prometheus_client-0.21.0-py3-none-any.whl", hash = "sha256:4fa6b4dd0ac16d58bb587c04b1caae65b8c5043e85f778f42f5f632f6af2e166"}, + {file = "prometheus_client-0.21.0.tar.gz", hash = "sha256:96c83c606b71ff2b0a433c98889d275f51ffec6c5e267de37c7a2b5c9aa9233e"}, +] + +[[package]] +name = "pydantic" +version = "2.9.2" +requires_python = ">=3.8" +summary = "Data validation using Python type hints" +groups = ["default"] +dependencies = [ + "annotated-types>=0.6.0", + "pydantic-core==2.23.4", + "typing-extensions>=4.12.2; python_version >= \"3.13\"", + "typing-extensions>=4.6.1; python_version < \"3.13\"", +] +files = [ + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, +] + +[[package]] +name = "pydantic-core" +version = "2.23.4" +requires_python = ">=3.8" +summary = "Core functionality for Pydantic validation and serialization" +groups = ["default"] +dependencies = [ + "typing-extensions!=4.7.0,>=4.6.0", +] +files = [ + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + 
{file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = 
"pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = 
"pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = 
"pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = 
"pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, +] + +[[package]] +name = "pytest" +version = "8.3.3" +requires_python = ">=3.8" +summary = "pytest: simple powerful testing with Python" +groups = ["pytest"] 
+dependencies = [ + "colorama; sys_platform == \"win32\"", + "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", + "iniconfig", + "packaging", + "pluggy<2,>=1.5", + "tomli>=1; python_version < \"3.11\"", +] +files = [ + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, +] + +[[package]] +name = "pytest-asyncio" +version = "0.24.0" +requires_python = ">=3.8" +summary = "Pytest support for asyncio" +groups = ["pytest"] +dependencies = [ + "pytest<9,>=8.2", +] +files = [ + {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, + {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, +] + +[[package]] +name = "ruff" +version = "0.7.1" +requires_python = ">=3.7" +summary = "An extremely fast Python linter and code formatter, written in Rust." 
+groups = ["lint"] +files = [ + {file = "ruff-0.7.1-py3-none-linux_armv6l.whl", hash = "sha256:cb1bc5ed9403daa7da05475d615739cc0212e861b7306f314379d958592aaa89"}, + {file = "ruff-0.7.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:27c1c52a8d199a257ff1e5582d078eab7145129aa02721815ca8fa4f9612dc35"}, + {file = "ruff-0.7.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:588a34e1ef2ea55b4ddfec26bbe76bc866e92523d8c6cdec5e8aceefeff02d99"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94fc32f9cdf72dc75c451e5f072758b118ab8100727168a3df58502b43a599ca"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:985818742b833bffa543a84d1cc11b5e6871de1b4e0ac3060a59a2bae3969250"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32f1e8a192e261366c702c5fb2ece9f68d26625f198a25c408861c16dc2dea9c"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:699085bf05819588551b11751eff33e9ca58b1b86a6843e1b082a7de40da1565"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:344cc2b0814047dc8c3a8ff2cd1f3d808bb23c6658db830d25147339d9bf9ea7"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4316bbf69d5a859cc937890c7ac7a6551252b6a01b1d2c97e8fc96e45a7c8b4a"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d3af9dca4c56043e738a4d6dd1e9444b6d6c10598ac52d146e331eb155a8ad"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c5c121b46abde94a505175524e51891f829414e093cd8326d6e741ecfc0a9112"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8422104078324ea250886954e48f1373a8fe7de59283d747c3a7eca050b4e378"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:56aad830af8a9db644e80098fe4984a948e2b6fc2e73891538f43bbe478461b8"}, 
+ {file = "ruff-0.7.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:658304f02f68d3a83c998ad8bf91f9b4f53e93e5412b8f2388359d55869727fd"}, + {file = "ruff-0.7.1-py3-none-win32.whl", hash = "sha256:b517a2011333eb7ce2d402652ecaa0ac1a30c114fbbd55c6b8ee466a7f600ee9"}, + {file = "ruff-0.7.1-py3-none-win_amd64.whl", hash = "sha256:f38c41fcde1728736b4eb2b18850f6d1e3eedd9678c914dede554a70d5241307"}, + {file = "ruff-0.7.1-py3-none-win_arm64.whl", hash = "sha256:19aa200ec824c0f36d0c9114c8ec0087082021732979a359d6f3c390a6ff2a37"}, + {file = "ruff-0.7.1.tar.gz", hash = "sha256:9d8a41d4aa2dad1575adb98a82870cf5db5f76b2938cf2206c22c940034a36f4"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +requires_python = ">=3.7" +summary = "Sniff out which async library your code is running under" +groups = ["default", "pytest"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "starlette" +version = "0.41.2" +requires_python = ">=3.8" +summary = "The little ASGI library that shines." 
+groups = ["default"] +dependencies = [ + "anyio<5,>=3.4.0", + "typing-extensions>=3.10.0; python_version < \"3.10\"", +] +files = [ + {file = "starlette-0.41.2-py3-none-any.whl", hash = "sha256:fbc189474b4731cf30fcef52f18a8d070e3f3b46c6a04c97579e85e6ffca942d"}, + {file = "starlette-0.41.2.tar.gz", hash = "sha256:9834fd799d1a87fd346deb76158668cfa0b0d56f85caefe8268e2d97c3468b62"}, +] + +[[package]] +name = "tomli" +version = "2.0.2" +requires_python = ">=3.8" +summary = "A lil' TOML parser" +groups = ["lint", "pytest"] +marker = "python_version < \"3.11\"" +files = [ + {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, +] + +[[package]] +name = "types-aiofiles" +version = "24.1.0.20240626" +requires_python = ">=3.8" +summary = "Typing stubs for aiofiles" +groups = ["default"] +files = [ + {file = "types-aiofiles-24.1.0.20240626.tar.gz", hash = "sha256:48604663e24bc2d5038eac05ccc33e75799b0779e93e13d6a8f711ddc306ac08"}, + {file = "types_aiofiles-24.1.0.20240626-py3-none-any.whl", hash = "sha256:7939eca4a8b4f9c6491b6e8ef160caee9a21d32e18534a57d5ed90aee47c66b4"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +requires_python = ">=3.8" +summary = "Backported and Experimental Type Hints for Python 3.8+" +groups = ["default", "lint", "pytest"] +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..a2e27fa --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,144 @@ +[project] +name = "huntflow-base-metrics" +version = "0.1.0" +description = "Prometheus metrics for Huntflow 
fastapi-based services" +authors = [ + {name = "Aleksandr Kuznetsov", email = "akuznetsov@huntflow.ru"}, +] +dependencies = [ + "prometheus-client>=0.21.0", + "aiofiles>=24.1.0", + "fastapi>=0.66.0", + "types-aiofiles>=24.1.0.20240626", +] +requires-python = ">=3.8" +readme = "README.md" +license = {text = "MIT"} +classifiers = [ + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] + +[build-system] +requires = ["pdm-backend"] +build-backend = "pdm.backend" + +[tool.pdm] +distribution = true + +[tool.pdm.dev-dependencies] +lint = [ + "ruff>=0.7.1", + "mypy>=1.13.0", +] +pytest = [ + "pytest>=8.3.3", + "pytest-asyncio>=0.24.0", + "httpx>=0.27.2", +] + +[tool.ruff] +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pants.d", + ".pyenv", + ".pytest_cache", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "site-packages", + "venv", +] +line-length = 100 +indent-width = 4 +target-version = "py38" + +[tool.ruff.lint] +ignore = [ + "E501", # line-too-long + "W191", # tab-indentation +] +select = [ + "B", # flake8-bugbear + "C4", # flake8-comprehensions + "E", # pycodestyle - Error + "F", # Pyflakes + "I", # isort + "W", # pycodestyle - Warning + "UP", # pyupgrade +] + +[tool.ruff.format] +# Like Black, use double quotes for strings. +quote-style = "double" + +# Like Black, indent with spaces, rather than tabs. +indent-style = "space" + +# Like Black, respect magic trailing commas. +skip-magic-trailing-comma = false + +# Like Black, automatically detect the appropriate line ending. 
+line-ending = "auto" + +# Enable auto-formatting of code examples in docstrings. Markdown, +# reStructuredText code/literal blocks and doctests are all supported. +# +# This is currently disabled by default, but it is planned for this +# to be opt-out in the future. +docstring-code-format = false + +# Set the line length limit used when formatting code snippets in +# docstrings. +# +# This only has an effect when the `docstring-code-format` setting is +# enabled. +docstring-code-line-length = "dynamic" + +[tool.pdm.scripts] +lint = "ruff check" +fmt = "ruff format ." + +[tool.pytest.ini_options] +pythonpath = [ + ".", "src" +] +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" + +[tool.mypy] +mypy_path = "src" +disallow_untyped_defs = true +disallow_incomplete_defs = true +python_version = "3.8" +strict = false +show_error_codes = true +warn_unused_ignores = true + +[[tool.mypy.overrides]] +module = "tests.*" +disallow_untyped_defs = false +disallow_incomplete_defs = true +ignore_missing_imports = true diff --git a/src/huntflow_base_metrics/__init__.py b/src/huntflow_base_metrics/__init__.py new file mode 100644 index 0000000..e85fb1b --- /dev/null +++ b/src/huntflow_base_metrics/__init__.py @@ -0,0 +1,23 @@ +from .base import ( + apply_labels, + observe_metrics, + register_method_observe_gauge, + register_method_observe_histogram, + register_metric, + start_metrics, + stop_metrics, +) +from .export import export_to_http_response +from .fastapi import add_middleware + +__all__ = [ + "apply_labels", + "observe_metrics", + "register_method_observe_histogram", + "register_method_observe_gauge", + "register_metric", + "start_metrics", + "stop_metrics", + "add_middleware", + "export_to_http_response", +] diff --git a/src/huntflow_base_metrics/_context.py b/src/huntflow_base_metrics/_context.py new file mode 100644 index 0000000..1aa275d --- /dev/null +++ b/src/huntflow_base_metrics/_context.py @@ -0,0 +1,23 @@ +import asyncio +from dataclasses 
import dataclass, field +from typing import Dict, List, Optional, Set, Tuple + +from prometheus_client import CollectorRegistry +from prometheus_client.metrics import MetricWrapperBase + +__all__ = ["METRIC_CONTEXT"] + + +@dataclass +class _MetricsContext: + enable_metrics: bool = False + registry: Optional[CollectorRegistry] = None + write_to_file_task: Optional[asyncio.Task] = None + include_routes: Optional[Set[str]] = None + exclude_routes: Optional[Set[str]] = None + + metrics_by_names: Dict[str, MetricWrapperBase] = field(default_factory=dict) + metrics_by_objects: Dict[MetricWrapperBase, Tuple[str, List]] = field(default_factory=dict) + + +METRIC_CONTEXT = _MetricsContext() diff --git a/src/huntflow_base_metrics/base.py b/src/huntflow_base_metrics/base.py new file mode 100644 index 0000000..ff54fd9 --- /dev/null +++ b/src/huntflow_base_metrics/base.py @@ -0,0 +1,199 @@ +"""Base definitions for metrics collections via prometheus client.""" + +import inspect +import logging +import platform +import time +import uuid +from contextlib import suppress +from functools import wraps +from typing import Any, Callable, List, Optional, Type + +from prometheus_client import ( + CollectorRegistry, + Gauge, + Histogram, +) +from prometheus_client.metrics import MetricWrapperBase, T + +from ._context import METRIC_CONTEXT as _METRIC_CONTEXT +from .export import start_export_to_file, stop_export_to_file + +LOGGER = logging.getLogger(__name__) +REGISTRY = CollectorRegistry() +INSTANCE_ID = platform.node() or str(uuid.uuid4()) + + +# Label for service name, should be taken from FACILITY_NAME env +SERVICE_LABEL = "service" +# Label for a running instance, should be taken from FACILITY_ID env +POD_LABEL = "pod" + +# Labels must be present in all collectors. +# These labels identify the whole service and it's current instance. +# The values should be set via `start_metrics` function +# before usage. 
+COMMON_LABELS = [SERVICE_LABEL, POD_LABEL] +COMMON_LABELS_VALUES = { + SERVICE_LABEL: "undefined", + POD_LABEL: INSTANCE_ID, +} + + +def register_metric(type_: Type[T], name: str, description: str, labels: List[str]) -> T: + """Create and register a new metric with the given `type_`. + :param type_: a prometheus_client class, must be nested from + MetricWrapperBase class. Examples: Histogram, Counter, etc. + :param name: unique metric name + :param description: metric short description + :param labels: list of metric-specific labels. It shouldn't include + labels defined in COMMON_LABELS, because these labels will be added + implicitly. + + Raises ValueError if `name` is already registered. + """ + if name in _METRIC_CONTEXT.metrics_by_names: + raise ValueError(f"Metric '{name}' already registered") + metric = type_( + name, + description, + COMMON_LABELS + labels, + registry=REGISTRY, + ) + _METRIC_CONTEXT.metrics_by_names[name] = metric + _METRIC_CONTEXT.metrics_by_objects[metric] = (name, labels) + return metric + + +def register_method_observe_histogram(name: str, description: str) -> Histogram: + """Create and register a new Histogram. + The created Histogram will contain label `method` and is suitable to pass + it to `observe_metrics` decorator. + """ + return register_metric(Histogram, name, description, ["method"]) + + +def register_method_observe_gauge(name: str, description: str) -> Gauge: + """Create and register a new Gauge. + The created Gauge will contain label `method` and is suitable to pass + it to `observe_metrics` decorator. + """ + return register_metric(Gauge, name, description, ["method"]) + + +def get_metric(name: str) -> MetricWrapperBase: + return _METRIC_CONTEXT.metrics_by_names[name] + + +def apply_labels(metric: T, **labels: str) -> T: + """Apply labels for a given metric.
+ Requires the same labels that were passed during metric creation + (see `register_metric` method) + Checks if the given set of labels is the same that was defined + when the metric was registered. If labels don't match, raises ValueError. + Also applies common labels values implicitly. + """ + metric_name, expected_labels = _METRIC_CONTEXT.metrics_by_objects[metric] + if set(expected_labels) != set(labels): + raise ValueError(f"Invalid labels set ({list(labels)}) for metric '{metric_name}'") + return metric.labels(**COMMON_LABELS_VALUES, **labels) + + +def start_metrics( + facility_name: str, + facility_id: str, + out_file_path: Optional[str] = None, + enabled: bool = True, + write_to_file: bool = False, + file_update_interval: float = 15, +) -> None: + """Method to initialize metrics_collection. + :param facility_name: string to specify a service/application name for metrics. + Will be passed to prometheus as `service` label for all metrics. + :param facility_id: string to specify an instance/pod/container of the service. + If it's empty, then HOSTNAME or a random string will be used. + It will be passed to prometheus as `pod` label for all metrics. + :param out_file_path: path in filesystem where metrics will be written. + May be empty if `write_to_file` is False. + :param enabled: enable or disable metrics collection. + :param write_to_file: enable or disable writing metrics + to file `out_file_path`.
+ :param file_update_interval: pause in seconds between saving metrics to `out_file_path` file + """ + _METRIC_CONTEXT.enable_metrics = enabled + _METRIC_CONTEXT.registry = REGISTRY + if facility_name: + COMMON_LABELS_VALUES[SERVICE_LABEL] = facility_name + if facility_id: + COMMON_LABELS_VALUES[POD_LABEL] = facility_id + if enabled and write_to_file: + if not out_file_path: + raise ValueError("Empty file path while enabled writing to file") + start_export_to_file(out_file_path, file_update_interval) + for metric in _METRIC_CONTEXT.metrics_by_objects: + with suppress(ValueError): + # there is no public interface in registry/collectors to check if the + # metric is already registered. So just catch names conflict and + # ignore it + REGISTRY.register(metric) + + +def stop_metrics() -> None: + """Method to stop all background tasks initialized by `start_metrics`. + Actually handle only the background task to write metrics to a file. + """ + _METRIC_CONTEXT.enable_metrics = False + for metric in _METRIC_CONTEXT.metrics_by_objects: + with suppress(KeyError): + REGISTRY.unregister(metric) + metric.clear() + stop_export_to_file() + + +def observe_metrics( + method: str, metric_timings: Histogram, metric_inprogress: Optional[Gauge] = None +) -> Callable: + """Decorator to measure timings of some method + Applicable only for async methods. + :param method: `method` label value for observed method/function + :param metric_timings: histogram collector to observe timing + :param metric_inprogress: optional Gauge collector to observe in progress + counter. 
+ """ + + def wrap(func: Callable) -> Callable: + @wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + if not _METRIC_CONTEXT.enable_metrics: + return await func(*args, **kwargs) + start = time.perf_counter() + if metric_inprogress is not None: + apply_labels(metric_inprogress, method=method).inc() + try: + return await func(*args, **kwargs) + finally: + end = time.perf_counter() + apply_labels(metric_timings, method=method).observe(end - start) + if metric_inprogress is not None: + apply_labels(metric_inprogress, method=method).dec() + + @wraps(func) + def sync_wrapper(*args: Any, **kwargs: Any) -> Any: + if not _METRIC_CONTEXT.enable_metrics: + return func(*args, **kwargs) + start = time.perf_counter() + if metric_inprogress is not None: + apply_labels(metric_inprogress, method=method).inc() + try: + return func(*args, **kwargs) + finally: + end = time.perf_counter() + apply_labels(metric_timings, method=method).observe(end - start) + if metric_inprogress is not None: + apply_labels(metric_inprogress, method=method).dec() + + if inspect.iscoroutinefunction(func): + return wrapper + return sync_wrapper + + return wrap diff --git a/src/huntflow_base_metrics/export.py b/src/huntflow_base_metrics/export.py new file mode 100644 index 0000000..483e5b7 --- /dev/null +++ b/src/huntflow_base_metrics/export.py @@ -0,0 +1,68 @@ +import asyncio +import logging +from typing import Any, Tuple + +import aiofiles +from prometheus_client import CONTENT_TYPE_LATEST, CollectorRegistry, generate_latest + +from ._context import METRIC_CONTEXT as _METRIC_CONTEXT + +LOGGER = logging.getLogger(__name__) + +# Limit for a series of exceptions during saving metrics to file. +# Just to not send too much spam to sentry. +# If we failed to write to a file with this limit, the most probably +# we will fail in the future too. 
Possible errors: + * not mounted directory (only container restart will help) + * insufficient rights to write to the directory + * no space left on the disk +_MAX_FILE_WRITE_ERRORS = 5 + + +async def _update_metric_file( + file_path: str, update_delay: float, registry: CollectorRegistry +) -> None: + LOGGER.info("Writing metrics to %s", file_path) + error_count = 0 + while True: + await asyncio.sleep(update_delay) + try: + LOGGER.debug("Updating metrics file") + async with aiofiles.open(file_path, "wb") as dst: + await dst.write(generate_latest(registry)) + error_count = 0 + except asyncio.CancelledError: + LOGGER.info("Write metric task is cancelled") + break + except Exception: + error_count += 1 + LOGGER.exception("Failed to write metrics to file: %s", file_path) + if error_count >= _MAX_FILE_WRITE_ERRORS: + LOGGER.warning("Update metrics file: total number of errors %s. Exit", error_count) + break + + +def start_export_to_file(file_path: str, update_delay: float) -> None: + """Starts background asyncio task to dump metrics into a file. + :param file_path: file name + :param update_delay: interval in seconds between writing to file.
+ """ + assert file_path + assert _METRIC_CONTEXT.registry is not None + task = asyncio.create_task( + _update_metric_file(file_path, update_delay, _METRIC_CONTEXT.registry) + ) + _METRIC_CONTEXT.write_to_file_task = task + + +def stop_export_to_file() -> None: + task = _METRIC_CONTEXT.write_to_file_task + if task is not None: + task.cancel() + _METRIC_CONTEXT.write_to_file_task = None + + +def export_to_http_response() -> Tuple[Any, str]: + """Returns tuple of exported metrics and content-type value""" + assert _METRIC_CONTEXT.registry is not None + return generate_latest(_METRIC_CONTEXT.registry), CONTENT_TYPE_LATEST diff --git a/src/huntflow_base_metrics/fastapi.py b/src/huntflow_base_metrics/fastapi.py new file mode 100644 index 0000000..2d7f2af --- /dev/null +++ b/src/huntflow_base_metrics/fastapi.py @@ -0,0 +1,135 @@ +import time +from typing import List, Optional, Tuple + +from fastapi import FastAPI +from prometheus_client import CONTENT_TYPE_LATEST, Counter, Gauge, Histogram, generate_latest +from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint +from starlette.requests import Request +from starlette.responses import Response +from starlette.routing import Match +from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR + +from .base import REGISTRY, apply_labels, register_metric + +# Metrics labels for HTTP requests stats +HTTP_METRICS_LABELS = ["method", "path_template"] + + +REQUESTS = register_metric( + Counter, + "requests_total", + "Total count of requests by method and path.", + HTTP_METRICS_LABELS, +) +RESPONSES = register_metric( + Counter, + "responses_total", + "Total count of responses by method, path and status codes.", + HTTP_METRICS_LABELS + ["status_code"], +) +REQUESTS_PROCESSING_TIME = register_metric( + Histogram, + "requests_processing_time_seconds", + "Histogram of requests processing time by path (in seconds)", + HTTP_METRICS_LABELS, +) +EXCEPTIONS = register_metric( + Counter, + "exceptions_total", + 
"Total count of exceptions raised by path and exception type", + HTTP_METRICS_LABELS + ["exception_type"], +) +REQUESTS_IN_PROGRESS = register_metric( + Gauge, + "requests_in_progress", + "Gauge of requests by method and path currently being processed", + HTTP_METRICS_LABELS, +) + + +class _PrometheusMiddleware(BaseHTTPMiddleware): + include_routes: Optional[List[str]] = None + exclude_routes: Optional[List[str]] = None + + async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response: + method = request.method + path_template, is_handled_path = self.get_path_template(request) + + if not is_handled_path or self._is_path_excluded(path_template): + return await call_next(request) + + apply_labels(REQUESTS_IN_PROGRESS, method=method, path_template=path_template).inc() + apply_labels(REQUESTS, method=method, path_template=path_template).inc() + + before_time = time.perf_counter() + status_code = HTTP_500_INTERNAL_SERVER_ERROR + try: + response = await call_next(request) + except BaseException as e: + apply_labels( + EXCEPTIONS, + method=method, + path_template=path_template, + exception_type=type(e).__name__, + ).inc() + raise + else: + status_code = response.status_code + after_time = time.perf_counter() + apply_labels( + REQUESTS_PROCESSING_TIME, method=method, path_template=path_template + ).observe(after_time - before_time) + finally: + apply_labels( + RESPONSES, + method=method, + path_template=path_template, + status_code=str(status_code), + ).inc() + apply_labels( + REQUESTS_IN_PROGRESS, + method=method, + path_template=path_template, + ).dec() + + return response + + @classmethod + def _is_path_excluded(cls, path_template: str) -> bool: + if cls.include_routes: + return path_template not in cls.include_routes + if cls.exclude_routes: + return path_template in cls.exclude_routes + return False + + @staticmethod + def get_path_template(request: Request) -> Tuple[str, bool]: + for route in request.app.routes: + match, _ = 
route.matches(request.scope) + if match == Match.FULL: + return route.path, True + + return request.url.path, False + + +def add_middleware( + app: FastAPI, + include_routes: Optional[List[str]] = None, + exclude_routes: Optional[List[str]] = None, +) -> None: + """Add observing middleware to the given FastAPI application. + :param include_routes: optional list of path templates to observe. + If it's not empty, then only the specified routes will be observed + (also exclude_routes will be ignored). + :param exclude_routes: optional list of path templates to not observe. + If it's not empty (and include_routes is not specified), then the + specified routes will not be observed. + """ + _PrometheusMiddleware.include_routes = include_routes + _PrometheusMiddleware.exclude_routes = exclude_routes + app.add_middleware(_PrometheusMiddleware) + + +def get_http_response_metrics() -> Response: + """Method returns HTTP Response with current metrics in prometheus format.""" + return Response(generate_latest(REGISTRY), headers={"Content-Type": CONTENT_TYPE_LATEST}) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..b2834d3 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,9 @@ +import pytest + +from huntflow_base_metrics import stop_metrics + + +@pytest.fixture(scope="function", autouse=True) +def disable_metrics(): + """Disable metrics on every test startup""" + stop_metrics() diff --git a/tests/test_base.py b/tests/test_base.py new file mode 100644 index 0000000..08fbb94 --- /dev/null +++ b/tests/test_base.py @@ -0,0 +1,29 @@ +import pytest +from prometheus_client import Histogram + +from huntflow_base_metrics import apply_labels, register_metric +from huntflow_base_metrics.base import COMMON_LABELS_VALUES + + +def test_apply_labels_ok(): + historgram = register_metric( + Histogram, "unique_historgram1", "Test histogram", ["label_one",
"label_two"] + ) + result_histogram = apply_labels(historgram, label_one="a", label_two="b") + expected_values = tuple(list(COMMON_LABELS_VALUES.values()) + ["a", "b"]) + assert expected_values in historgram._metrics + assert result_histogram is historgram._metrics[expected_values] + + +def test_apply_labels_mismatch(): + historgram = register_metric( + Histogram, "unique_historgram2", "Test histogram", ["label_one", "label_two"] + ) + with pytest.raises(ValueError): + apply_labels(historgram) + + with pytest.raises(ValueError): + apply_labels(historgram, label_1="a", label_2="b") + + with pytest.raises(ValueError): + apply_labels(historgram, label_one="a", label_two="b", label_three="c") diff --git a/tests/test_export.py b/tests/test_export.py new file mode 100644 index 0000000..e9ea308 --- /dev/null +++ b/tests/test_export.py @@ -0,0 +1,57 @@ +import asyncio +from uuid import uuid4 + +from prometheus_client.parser import text_fd_to_metric_families + +from huntflow_base_metrics import ( + observe_metrics, + register_method_observe_histogram, + start_metrics, +) +from huntflow_base_metrics.export import start_export_to_file, stop_export_to_file + + +async def test_file_export(tmp_path): + method = "test_method" + metric_name = "test_export_histogram" + histogram = register_method_observe_histogram(metric_name, "Test histogram") + + @observe_metrics(method, histogram) + async def observable_func(sleep_time=None): + if sleep_time: + await asyncio.sleep(sleep_time) + return sleep_time + + facility_name = uuid4().hex + facility_id = uuid4().hex + + start_metrics(facility_name, facility_id) + + sleep_time = 0.2 + result = await observable_func(sleep_time) + assert sleep_time == result + + file_path = tmp_path / "test_metrics.prom" + start_export_to_file(file_path, 0.1) + try: + await asyncio.sleep(0.15) + with open(file_path) as fin: + metrics = [ + metric for metric in text_fd_to_metric_families(fin) if metric.name == metric_name + ] + finally: + stop_export_to_file() 
+ await asyncio.sleep(0) + + assert len(metrics) == 1 + metric = metrics[0] + count = None + sum_ = None + for sample in metric.samples: + if sample.name == "test_export_histogram_count": + count = sample.value + elif sample.name == "test_export_histogram_sum": + sum_ = sample.value + assert count == 1 + assert sum_ is not None + assert (sleep_time - 0.01) < sum_ < (sleep_time + 0.01) diff --git a/tests/test_fastapi_metrics.py b/tests/test_fastapi_metrics.py new file mode 100644 index 0000000..31ced6a --- /dev/null +++ b/tests/test_fastapi_metrics.py @@ -0,0 +1,218 @@ +from contextlib import suppress +from uuid import uuid4 + +from fastapi import FastAPI +from starlette.testclient import TestClient + +from huntflow_base_metrics import add_middleware, start_metrics +from huntflow_base_metrics.base import COMMON_LABELS_VALUES, REGISTRY + +FACILITY_NAME = "test_service" +FACILITY_ID = uuid4().hex + + +def create_app() -> FastAPI: + app = FastAPI() + + @app.get("/valueerror") + async def get_valuerror(): + raise ValueError() + + @app.get("/ok") + async def get_ok(): + return {"status": "ok"} + + @app.get("/one") + async def get_one(): + return {"status": "one"} + + @app.get("/two") + async def get_two(): + return {"status": "two"} + + return app + + +def test_ok(): + app = create_app() + client = TestClient(app) + start_metrics(FACILITY_NAME, FACILITY_ID) + add_middleware(app) + response = client.get("/ok") + assert response.status_code == 200 + assert response.json() == {"status": "ok"} + + labels = COMMON_LABELS_VALUES.copy() + labels.update( + { + "method": "GET", + "path_template": "/ok", + } + ) + assert ( + REGISTRY.get_sample_value( + "requests_total", + labels, + ) + == 1 + ) + + labels_responses_total = labels.copy() + labels_responses_total["status_code"] = "200" + assert ( + REGISTRY.get_sample_value( + "responses_total", + labels_responses_total, + ) + == 1 + ) + + labels_proc_time = labels.copy() + labels_proc_time["le"] = "0.005" + assert ( + 
REGISTRY.get_sample_value( + "requests_processing_time_seconds_bucket", + labels_proc_time, + ) + == 1 + ) + + labels_missed = labels.copy() + labels_missed["path_template"] = "/unknown_path" + assert ( + REGISTRY.get_sample_value( + "requests_total", + labels_missed, + ) + is None + ) + + +def test_exception(): + app = create_app() + client = TestClient(app) + start_metrics(FACILITY_NAME, FACILITY_ID) + add_middleware(app) + with suppress(ValueError): + client.get("/valueerror") + + labels = COMMON_LABELS_VALUES.copy() + labels.update( + { + "method": "GET", + "path_template": "/valueerror", + "exception_type": "ValueError", + } + ) + + assert ( + REGISTRY.get_sample_value( + "exceptions_total", + labels, + ) + == 1 + ) + + +def test_include(): + app = create_app() + client = TestClient(app) + start_metrics(FACILITY_NAME, FACILITY_ID) + add_middleware(app, include_routes=["/ok"]) + + response = client.get("/ok") + assert response.status_code == 200 + assert response.json() == {"status": "ok"} + + response = client.get("/one") + assert response.status_code == 200 + assert response.json() == {"status": "one"} + + response = client.get("/two") + assert response.status_code == 200 + assert response.json() == {"status": "two"} + + labels = COMMON_LABELS_VALUES.copy() + labels.update( + { + "method": "GET", + "path_template": "/ok", + } + ) + assert ( + REGISTRY.get_sample_value( + "requests_total", + labels, + ) + == 1 + ) + + labels["path_template"] = "/one" + assert ( + REGISTRY.get_sample_value( + "requests_total", + labels, + ) + is None + ) + + labels["path_template"] = "/two" + assert ( + REGISTRY.get_sample_value( + "requests_total", + labels, + ) + is None + ) + + +def test_exclude(): + app = create_app() + client = TestClient(app) + start_metrics(FACILITY_NAME, FACILITY_ID) + add_middleware(app, exclude_routes=["/ok", "/one"]) + + response = client.get("/ok") + assert response.status_code == 200 + assert response.json() == {"status": "ok"} + + response = 
client.get("/one") + assert response.status_code == 200 + assert response.json() == {"status": "one"} + + response = client.get("/two") + assert response.status_code == 200 + assert response.json() == {"status": "two"} + + labels = COMMON_LABELS_VALUES.copy() + labels.update( + { + "method": "GET", + "path_template": "/ok", + } + ) + assert ( + REGISTRY.get_sample_value( + "requests_total", + labels, + ) + is None + ) + + labels["path_template"] = "/one" + assert ( + REGISTRY.get_sample_value( + "requests_total", + labels, + ) + is None + ) + + labels["path_template"] = "/two" + assert ( + REGISTRY.get_sample_value( + "requests_total", + labels, + ) + == 1 + ) diff --git a/tests/test_observe_metrics.py b/tests/test_observe_metrics.py new file mode 100644 index 0000000..1572fd7 --- /dev/null +++ b/tests/test_observe_metrics.py @@ -0,0 +1,89 @@ +import asyncio +import time +from unittest.mock import Mock +from uuid import uuid4 + +from huntflow_base_metrics.base import ( + COMMON_LABELS_VALUES, + REGISTRY, + observe_metrics, + register_method_observe_histogram, + start_metrics, +) + + +async def test_disabled_metrics(): + histogram_mock = Mock() + method = "asdfg" + + @observe_metrics(method, histogram_mock) + async def observable_func(sleep_time=None): + if sleep_time: + await asyncio.sleep(sleep_time) + return sleep_time + + @observe_metrics(method, histogram_mock) + def sync_observable_func(sleep_time=None): + if sleep_time: + time.sleep(sleep_time) + return sleep_time + + sleep_time = 0.3 + result = await observable_func(sleep_time) + assert sleep_time == result + + result = sync_observable_func(sleep_time) + assert sleep_time == result + + assert not histogram_mock.labels.called + + +async def test_histogram_metrics(): + method = "asdfg" + metric_name = "test_timing_histogram" + histogram = register_method_observe_histogram(metric_name, "Test histogram") + + @observe_metrics(method, histogram) + async def observable_func(sleep_time=None): + if sleep_time: + 
await asyncio.sleep(sleep_time) + return sleep_time + + facility_name = uuid4().hex + facility_id = uuid4().hex + + start_metrics(facility_name, facility_id) + + sleep_time = 0.3 + result = await observable_func(sleep_time) + assert sleep_time == result + + labels = COMMON_LABELS_VALUES.copy() + labels.update( + { + "method": method, + } + ) + + time_sum = REGISTRY.get_sample_value( + "test_timing_histogram_sum", + labels, + ) + assert time_sum is not None + assert sleep_time <= time_sum < sleep_time + 0.01 + + @observe_metrics(method, histogram) + def sync_observable_func(sleep_time=None): + if sleep_time: + time.sleep(sleep_time) + return sleep_time + + result = sync_observable_func(sleep_time) + assert sleep_time == result + + time_sum = REGISTRY.get_sample_value( + "test_timing_histogram_sum", + labels, + ) + assert time_sum is not None + assert sleep_time * 2 <= time_sum < sleep_time * 2 + 0.01