From 7f5a507c586629789142fdc8309ef160de4dc03c Mon Sep 17 00:00:00 2001
From: Jean-Louis Fuchs
Date: Tue, 2 Jan 2024 16:36:44 +0100
Subject: [PATCH] test: add cov, flake8, mypy, black and isort

---
 .gitignore                    |    2 +
 Makefile                      |   48 +-
 TODO                          |   13 +-
 poetry.lock                   |  336 ++-
 pyaptly/__init__.py           | 1672 +-------------------------------
 pyaptly/aptly_test.py         |   14 +-
 pyaptly/cli.py                |   12 +-
 pyaptly/config_file.py        |   10 +-
 pyaptly/conftest.py           |   27 +-
 pyaptly/dateround_test.py     |  170 ++--
 pyaptly/graph_test.py         |    7 +-
 pyaptly/helpers_test.py       |   15 +-
 pyaptly/legacy.py             | 1673 +++++++++++++++++++++++++++++++++
 pyaptly/test.py               |    7 +-
 pyaptly/test_test.py          |   44 +-
 pyaptly/tests/__init__.py     |    1 +
 pyaptly/tests/bad-unicode.bin |    1 +
 pyaptly/tests/test_mirror.py  |    3 +-
 pyaptly/tests/test_util.py    |   31 +
 pyaptly/util.py               |   23 +-
 pyaptly/version.py            |    3 +
 pyproject.toml                |   44 +-
 22 files changed, 2222 insertions(+), 1934 deletions(-)
 mode change 100755 => 100644 pyaptly/__init__.py
 create mode 100755 pyaptly/legacy.py
 create mode 100644 pyaptly/tests/__init__.py
 create mode 100644 pyaptly/tests/bad-unicode.bin
 create mode 100644 pyaptly/tests/test_util.py

diff --git a/.gitignore b/.gitignore
index a1fa701..19dae60 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,4 @@
 __pycache__
 /.hypothesis
+/.dmypy.json
+/.coverage
\ No newline at end of file
diff --git a/Makefile b/Makefile
index a239346..9879c5e 100644
--- a/Makefile
+++ b/Makefile
@@ -40,14 +40,52 @@ wait-for-ready: up ## wait for web-server to be ready for testing
 poetry-install: wait-for-ready ## install dev environment
 	@docker compose exec testing poetry install
 
+.PHONY: mypy
+mypy: poetry-install
+	@docker compose exec testing poetry run dmypy run -- pyaptly
+
+.PHONY: pytest
+pytest: poetry-install ## run pytest
+	@docker compose exec testing poetry run pytest -vv --cov
+
+.PHONY: check-isort
+check-isort: poetry-install ## check isort
+	@docker compose exec testing poetry run isort --check pyaptly
+
+.PHONY: check-black
+check-black: poetry-install ## check black
+	@docker compose exec testing poetry run black --check pyaptly
+
+.PHONY: format-black
+format-black: poetry-install ## format code with black
+	@docker compose exec testing poetry run black pyaptly
+
+.PHONY: flake8
+flake8: poetry-install ## run flake8
+	@docker compose exec testing poetry run flake8 pyaptly
+
+.PHONY: lint-code
+lint-code: check-isort check-black flake8 ## check all linters
+
 .PHONY: test
-test: poetry-install ## run pytest
-	@docker compose exec testing poetry run pytest
+test: pytest mypy lint-code ## run all testing
 
 .PHONY: shell
 shell: poetry-install ## run shell
 	@docker compose exec testing bash -c "SHELL=bash poetry shell"
 
-.PHONY: entr
-entr: poetry-install ## run entr
-	@docker compose exec testing bash -c "find -name '*.py' | SHELL=bash poetry run entr bash -c 'pytest -x --lf'"
+.PHONY: entr-pytest
+entr-pytest: poetry-install ## run pytest with entr
+	@docker compose exec testing bash -c "find -name '*.py' | SHELL=bash poetry run entr bash -c 'pytest -x --lf; echo ---'"
+
+.PHONY: entr-mypy
+entr-mypy: poetry-install ## run mypy with entr
+	@docker compose exec testing bash -c "find -name '*.py' | SHELL=bash poetry run entr bash -c 'make local-mypy; echo ---'"
+
+.PHONY: entr-flake8
+entr-flake8: poetry-install ## run flake8 with entr
+	@docker compose exec testing bash -c "find -name '*.py' | SHELL=bash poetry run entr bash -c 'flake8 pyaptly; echo ---'"
+
+.PHONY: local-mypy
+local-mypy: ## Run mypy as daemon locally (requires local-dev)
+	@poetry run dmypy run -- pyaptly
diff --git a/TODO b/TODO
index 78c5c3c..eee9b8c 100644
--- a/TODO
+++ b/TODO
@@ -1,5 +1,11 @@
 # NEXT: Add coverage and mypy
 
+# Call code moves somewhere else
+
+- Tests to the tests/ directory
+- Everything else away from legacy.py
+- Remove version.py completely
+
 # Update old files
 
 The following files have just been copied without checking if their content is ok:
@@ -19,4 +25,9 @@ The reason for this is to ensure the continuity of the git history.
 
 # Replace all subprocess commands with a modern one (usually run())
 
-# All logging should be done via logging (no stdout/stderr)
\ No newline at end of file
+# All logging should be done via logging (no stdout/stderr)
+
+# Remove all top-level `# type: ignore` and all `# type: ignore # TODO`
+
+# Add good documentation to every function and module, but plan to review all of
+  it once everything is in place
diff --git a/poetry.lock b/poetry.lock
index 6faf71e..c7bc605 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,54 +1,53 @@
-# This file is automatically @generated by Poetry and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
 
 [[package]]
 name = "attrs"
-version = "23.1.0"
+version = "23.2.0"
 description = "Classes Without Boilerplate"
-category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
-    {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
+    {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
+    {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
 ]
 
 [package.extras]
 cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
-dev = ["attrs[docs,tests]", "pre-commit"]
+dev = ["attrs[tests]", "pre-commit"]
 docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
 tests = ["attrs[tests-no-zope]", "zope-interface"]
-tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
+tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
 
 [[package]]
 name = "black"
-version = "23.12.0"
+version = "23.12.1"
 description = "The uncompromising code formatter."
-category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "black-23.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67f19562d367468ab59bd6c36a72b2c84bc2f16b59788690e02bbcb140a77175"}, - {file = "black-23.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbd75d9f28a7283b7426160ca21c5bd640ca7cd8ef6630b4754b6df9e2da8462"}, - {file = "black-23.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:593596f699ca2dcbbbdfa59fcda7d8ad6604370c10228223cd6cf6ce1ce7ed7e"}, - {file = "black-23.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:12d5f10cce8dc27202e9a252acd1c9a426c83f95496c959406c96b785a92bb7d"}, - {file = "black-23.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e73c5e3d37e5a3513d16b33305713237a234396ae56769b839d7c40759b8a41c"}, - {file = "black-23.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba09cae1657c4f8a8c9ff6cfd4a6baaf915bb4ef7d03acffe6a2f6585fa1bd01"}, - {file = "black-23.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace64c1a349c162d6da3cef91e3b0e78c4fc596ffde9413efa0525456148873d"}, - {file = "black-23.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:72db37a2266b16d256b3ea88b9affcdd5c41a74db551ec3dd4609a59c17d25bf"}, - {file = "black-23.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fdf6f23c83078a6c8da2442f4d4eeb19c28ac2a6416da7671b72f0295c4a697b"}, - {file = "black-23.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39dda060b9b395a6b7bf9c5db28ac87b3c3f48d4fdff470fa8a94ab8271da47e"}, - {file = "black-23.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7231670266ca5191a76cb838185d9be59cfa4f5dd401b7c1c70b993c58f6b1b5"}, - {file = "black-23.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:193946e634e80bfb3aec41830f5d7431f8dd5b20d11d89be14b84a97c6b8bc75"}, - {file = "black-23.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bcf91b01ddd91a2fed9a8006d7baa94ccefe7e518556470cf40213bd3d44bbbc"}, - {file = "black-23.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:996650a89fe5892714ea4ea87bc45e41a59a1e01675c42c433a35b490e5aa3f0"}, - {file = "black-23.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdbff34c487239a63d86db0c9385b27cdd68b1bfa4e706aa74bb94a435403672"}, - {file = "black-23.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:97af22278043a6a1272daca10a6f4d36c04dfa77e61cbaaf4482e08f3640e9f0"}, - {file = "black-23.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ead25c273adfad1095a8ad32afdb8304933efba56e3c1d31b0fee4143a1e424a"}, - {file = "black-23.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c71048345bdbced456cddf1622832276d98a710196b842407840ae8055ade6ee"}, - {file = "black-23.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a832b6e00eef2c13b3239d514ea3b7d5cc3eaa03d0474eedcbbda59441ba5d"}, - {file = "black-23.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:6a82a711d13e61840fb11a6dfecc7287f2424f1ca34765e70c909a35ffa7fb95"}, - {file = "black-23.12.0-py3-none-any.whl", hash = "sha256:a7c07db8200b5315dc07e331dda4d889a56f6bf4db6a9c2a526fa3166a81614f"}, - {file = "black-23.12.0.tar.gz", hash = "sha256:330a327b422aca0634ecd115985c1c7fd7bdb5b5a2ef8aa9888a82e2ebe9437a"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -68,7 +67,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -83,7 +81,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -91,11 +88,74 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coverage" +version = "7.4.0" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = 
"coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, +] + +[package.extras] +toml = ["tomli"] + [[package]] name = "docstring-to-markdown" version = "0.13" description = "On the fly conversion of Python docstrings to markdown" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -107,7 +167,6 @@ files = [ name = "fancycompleter" version = "0.9.1" description = "colorful TAB completion for Python prompt" -category = "dev" optional = false python-versions = "*" files = [ @@ -123,7 +182,6 @@ pyrepl = ">=0.8.2" name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -140,7 +198,6 @@ pyflakes = ">=3.1.0,<3.2.0" name = "flake8-bugbear" version = "23.12.2" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
-category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -159,7 +216,6 @@ dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", name = "flake8-debugger" version = "4.1.2" description = "ipdb/pdb statement checker plugin for flake8" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -175,7 +231,6 @@ pycodestyle = "*" name = "flake8-docstrings" version = "1.7.0" description = "Extension for flake8 which uses pydocstyle to check docstrings" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -191,7 +246,6 @@ pydocstyle = ">=2.1" name = "flake8-isort" version = "6.1.1" description = "flake8 plugin that integrates isort" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -206,11 +260,26 @@ isort = ">=5.0.0,<6" [package.extras] test = ["pytest"] +[[package]] +name = "flake8-pyproject" +version = "1.2.3" +description = "Flake8 plug-in loading the configuration from pyproject.toml" +optional = false +python-versions = ">= 3.6" +files = [ + {file = "flake8_pyproject-1.2.3-py3-none-any.whl", hash = "sha256:6249fe53545205af5e76837644dc80b4c10037e73a0e5db87ff562d75fb5bd4a"}, +] + +[package.dependencies] +Flake8 = ">=5" + +[package.extras] +dev = ["pyTest", "pyTest-cov"] + [[package]] name = "flake8-string-format" version = "0.3.0" description = "string format checker, plugin for flake8" -category = "dev" optional = false python-versions = "*" files = [ @@ -225,7 +294,6 @@ flake8 = "*" name = "flake8-tuple" version = "0.4.1" description = "Check code for 1 element tuple." -category = "dev" optional = false python-versions = "*" files = [ @@ -239,14 +307,13 @@ six = "*" [[package]] name = "freezegun" -version = "1.3.1" +version = "1.4.0" description = "Let your Python tests travel through time" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "freezegun-1.3.1-py3-none-any.whl", hash = "sha256:065e77a12624d05531afa87ade12a0b9bdb53495c4573893252a055b545ce3ea"}, - {file = "freezegun-1.3.1.tar.gz", hash = "sha256:48984397b3b58ef5dfc645d6a304b0060f612bcecfdaaf45ce8aff0077a6cb6a"}, + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, ] [package.dependencies] @@ -254,14 +321,13 @@ python-dateutil = ">=2.7" [[package]] name = "hypothesis" -version = "6.92.0" +version = "6.92.2" description = "A library for property-based testing" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "hypothesis-6.92.0-py3-none-any.whl", hash = "sha256:d4577f99b912acc725bea684899b7cb62591a0412e2446c618be0b4855995276"}, - {file = "hypothesis-6.92.0.tar.gz", hash = "sha256:65b72c7dc7da3e16144db54fe093c6b74a33631b933a8063eb754c5a61361ae6"}, + {file = "hypothesis-6.92.2-py3-none-any.whl", hash = "sha256:d335044492acb03fa1fdb4edacb81cca2e578049fc7306345bc0e8947fef15a9"}, + {file = "hypothesis-6.92.2.tar.gz", hash = "sha256:841f89a486c43bdab55698de8929bd2635639ec20bf6ce98ccd75622d7ee6d41"}, ] [package.dependencies] @@ -288,7 +354,6 @@ zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2023.3)"] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -300,7 +365,6 @@ files = [ name = "isort" version = "5.13.2" description = "A Python utility / library to sort 
Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -315,7 +379,6 @@ colors = ["colorama (>=0.4.6)"] name = "jedi" version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -335,7 +398,6 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -347,7 +409,6 @@ files = [ name = "mock" version = "5.1.0" description = "Rolling backport of unittest.mock for all Pythons" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -362,39 +423,38 @@ test = ["pytest", "pytest-cov"] [[package]] name = "mypy" -version = "1.7.1" +version = "1.8.0" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, - {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, - {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, - {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, - {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, - {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, - {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, - {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, - {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, - {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, - {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, - {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, - {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, - {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, - {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, - {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, - {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, - {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, - {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, ] [package.dependencies] @@ -411,7 +471,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -423,7 +482,6 @@ files = [ name = "packaging" version = "23.2" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -435,7 +493,6 @@ files = [ name = "parso" version = "0.8.3" description = "A Python Parser" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -451,7 +508,6 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -463,7 +519,6 @@ files = [ name = "pdbpp" version = "0.10.3" description = "pdb++, a drop-in replacement for pdb" -category = "dev" optional = false python-versions = "*" files = [ @@ -484,7 +539,6 @@ testing = ["funcsigs", "pytest"] name = "platformdirs" version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -500,7 +554,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -513,10 +566,9 @@ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] -name = "pretty-dump" +name = "pretty_dump" version = "3.0" description = "Diff and dump anything" -category = "main" optional = false python-versions = "*" files = [] @@ -535,7 +587,6 @@ resolved_reference = "a5bd2bdfc68d46df01695079886b3818477f3137" name = "pycodestyle" version = "2.11.1" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -547,7 +598,6 @@ files = [ name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -565,7 +615,6 @@ toml = ["tomli (>=1.2.3)"] name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -577,7 +626,6 @@ files = [ name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -593,7 +641,6 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyreadline" version = "2.1" description = "A python implmementation of GNU readline." -category = "dev" optional = false python-versions = "*" files = [ @@ -604,7 +651,6 @@ files = [ name = "pyrepl" version = "0.9.0" description = "A library for building flexible command line interfaces" -category = "dev" optional = false python-versions = "*" files = [ @@ -613,14 +659,13 @@ files = [ [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -632,11 +677,56 @@ pluggy = ">=0.12,<2.0" [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-cover" +version = "3.0.0" +description = "Pytest plugin for measuring coverage. Forked from `pytest-cov`." +optional = false +python-versions = "*" +files = [ + {file = "pytest-cover-3.0.0.tar.gz", hash = "sha256:5bdb6c1cc3dd75583bb7bc2c57f5e1034a1bfcb79d27c71aceb0b16af981dbf4"}, + {file = "pytest_cover-3.0.0-py2.py3-none-any.whl", hash = "sha256:578249955eb3b5f3991209df6e532bb770b647743b7392d3d97698dc02f39ebb"}, +] + +[package.dependencies] +pytest-cov = ">=2.0" + +[[package]] +name = "pytest-coverage" +version = "0.0" +description = "Pytest plugin for measuring coverage. Forked from `pytest-cov`." +optional = false +python-versions = "*" +files = [ + {file = "pytest-coverage-0.0.tar.gz", hash = "sha256:db6af2cbd7e458c7c9fd2b4207cee75258243c8a81cad31a7ee8cfad5be93c05"}, + {file = "pytest_coverage-0.0-py2.py3-none-any.whl", hash = "sha256:dedd084c5e74d8e669355325916dc011539b190355021b037242514dee546368"}, +] + +[package.dependencies] +pytest-cover = "*" + [[package]] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -651,7 +741,6 @@ six = ">=1.5" name = "python-lsp-black" version = "1.3.0" description = "Black plugin for the Python LSP Server" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -670,7 +759,6 @@ dev = ["flake8", "isort (>=5.0)", "mypy", "pre-commit", "pytest", "types-pkg-res name = "python-lsp-isort" version = "0.1" description = "isort plugin for the Python LSP Server" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -688,7 +776,6 @@ dev = ["pytest"] name = "python-lsp-jsonrpc" version = "1.1.2" description = "JSON RPC 2.0 server library" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -706,7 +793,6 @@ test = ["coverage", "pycodestyle", "pyflakes", "pylint", "pytest", "pytest-cov"] name = "python-lsp-server" version = "1.9.0" description = "Python Language Server for the Language Server Protocol" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -739,7 +825,6 @@ yapf = ["whatthepatch (>=1.0.2,<2.0.0)", "yapf (>=0.33.0)"] name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -751,7 +836,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -811,7 +895,6 @@ files = [ name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -823,7 +906,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
-category = "dev"
 optional = false
 python-versions = "*"
 files = [
@@ -835,7 +917,6 @@ files = [
 name = "sortedcontainers"
 version = "2.4.0"
 description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
-category = "dev"
 optional = false
 python-versions = "*"
 files = [
@@ -847,7 +928,6 @@ files = [
 name = "testfixtures"
 version = "7.2.2"
 description = "A collection of helpers and mock objects for unit tests and doc tests."
-category = "dev"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -864,7 +944,6 @@ test = ["django", "mypy", "pytest (>=3.6)", "pytest-cov", "pytest-django", "sybi
 name = "toml"
 version = "0.10.2"
 description = "Python Library for Tom's Obvious, Minimal Language"
-category = "main"
 optional = false
 python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
 files = [
@@ -872,11 +951,32 @@ files = [
 {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
 ]
 
+[[package]]
+name = "types-pyyaml"
+version = "6.0.12.12"
+description = "Typing stubs for PyYAML"
+optional = false
+python-versions = "*"
+files = [
+    {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"},
+    {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"},
+]
+
+[[package]]
+name = "types-toml"
+version = "0.10.8.7"
+description = "Typing stubs for toml"
+optional = false
+python-versions = "*"
+files = [
+    {file = "types-toml-0.10.8.7.tar.gz", hash = "sha256:58b0781c681e671ff0b5c0319309910689f4ab40e8a2431e205d70c94bb6efb1"},
+    {file = "types_toml-0.10.8.7-py3-none-any.whl", hash = "sha256:61951da6ad410794c97bec035d59376ce1cbf4453dc9b6f90477e81e4442d631"},
+]
+
 [[package]]
 name = "typing-extensions"
 version = "4.9.0"
 description = "Backported and Experimental Type Hints for Python 3.8+"
-category = "dev"
 optional = false
 python-versions = ">=3.8"
 files = [
@@ -888,7 +988,6 @@ files = [
 name = "ujson"
 version = "5.9.0"
 description = "Ultra fast JSON encoder and decoder for Python"
-category = "dev"
 optional = false
 python-versions = ">=3.8"
 files = [
@@ -963,7 +1062,6 @@ files = [
 name = "wmctrl"
 version = "0.5"
 description = "A tool to programmatically control windows inside X"
-category = "dev"
 optional = false
 python-versions = ">=2.7"
 files = [
@@ -980,4 +1078,4 @@ test = ["pytest"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "4d7ec7ba8ba4181faed29aeb205967f505df634e9f1289b7b815b240402aba1f"
+content-hash = "05863f972edaa146a622bd520e2d61b7d384166e4047649c6fda3af5ae977725"
diff --git a/pyaptly/__init__.py b/pyaptly/__init__.py
old mode 100755
new mode 100644
index d57cb1a..8d96749
--- a/pyaptly/__init__.py
+++ b/pyaptly/__init__.py
@@ -1,1670 +1,6 @@
-#!/usr/bin/env python2
-"""Aptly mirror/snapshot managment automation."""
-import argparse
-import codecs
-import collections
-import datetime
-import logging
-import os
-import re
-import subprocess
-import sys
-
-import freeze
-import six
-import yaml
+"""PyAptly automates the creation and management of aptly mirrors and snapshots.
+
+Configuration is based on toml input files.
+""" -_logging_setup = False - -if six.PY2: - environb = os.environ # pragma: no cover -else: - environb = os.environb # pragma: no cover - - -def init_hypothesis(): - """Initialize hypothesis profile if hypothesis is available""" - try: # pragma: no cover:w - if b"HYPOTHESIS_PROFILE" in environb: - from hypothesis import Settings - - Settings.register_profile("ci", Settings(max_examples=10000)) - Settings.load_profile(os.getenv("HYPOTHESIS_PROFILE", "default")) - except (ImportError, AttributeError): # pragma: no cover - pass - - -def get_logger(): - """Get the logger. - - :rtype: logging.Logger""" - return logging.getLogger("pyaptly") - - -lg = get_logger() -init_hypothesis() - - -def iso_first_week_start(iso_year, tzinfo=None): - """The gregorian calendar date of the first day of the given ISO year - - :param iso_year: Year to find the date of the first week. - :type iso_year: int""" - fourth_jan = datetime.datetime(iso_year, 1, 4, tzinfo=tzinfo) - delta = datetime.timedelta(fourth_jan.isoweekday() - 1) - return fourth_jan - delta - - -def iso_to_gregorian(iso_year, iso_week, iso_day, tzinfo=None): - """Gregorian calendar date for the given ISO year, week and day - - :param iso_year: ISO year - :type iso_year: int - :param iso_week: ISO week - :type iso_week: int - :param iso_day: ISO day - :type iso_day: int""" - year_start = iso_first_week_start(iso_year, tzinfo) - return year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1) - - -def time_remove_tz(time): - """Convert a :py:class`datetime.time` to :py:class`datetime.time` to - without tzinfo. - - :param time: Time to convert - :type time: :py:class:`datetime.time` - :rtype: :py:class:`datetime.time` - """ - return datetime.time( - hour=time.hour, - minute=time.minute, - second=time.second, - microsecond=time.microsecond, - ) - - -def time_delta_helper(time): # pragma: no cover - """Convert a :py:class`datetime.time` to :py:class`datetime.datetime` to - calculate deltas - - :param time: Time to convert - :type time: :py:class:`datetime.time` - :rtype: :py:class:`datetime.datetime` - """ - return datetime.datetime( - year=2000, - month=1, - day=1, - hour=time.hour, - minute=time.minute, - second=time.second, - microsecond=time.microsecond, - tzinfo=time.tzinfo, - ) - - -def date_round_weekly(date, day_of_week=1, time=None): - """Round datetime back (floor) to a given the of the week. - - THIS FUNCTION IGNORES THE TZINFO OF TIME and assumes it is the same tz as - the date. - - :param date: Datetime object to round - :type date: :py:class:`datetime.datetime` - :param day_of_week: ISO day of week: monday is 1 and sunday is 7 - :type day_of_week: int - :param time: Roundpoint in the day (tzinfo ignored) - :type time: :py:class:`datetime.time` - :rtype: :py:class:`datetime.datetime`""" - if time: - time = time_remove_tz(time) - else: # pragma: no cover - time = datetime.time(hour=0, minute=0) - - delta = datetime.timedelta( - days=day_of_week - 1, - hours=time.hour, - minutes=time.minute, - seconds=time.second, - microseconds=time.microsecond, - ) - raster_date = date - delta - iso = raster_date.isocalendar() - rounded_date = iso_to_gregorian(iso[0], iso[1], 1, date.tzinfo) - return rounded_date + delta - - -def date_round_daily(date, time=None): - """Round datetime to day back (floor) to the roundpoint (time) in the day - - THIS FUNCTION IGNORES THE TZINFO OF TIME and assumes it is the same tz as - the date. 
- - :param date: Datetime object to round - :type date: :py:class:`datetime.datetime` - :param time: Roundpoint in the day (tzinfo ignored) - :type time: :py:class:`datetime.time` - :rtype: :py:class:`datetime.datetime`""" - if time: - time = time_remove_tz(time) - else: # pragma: no cover - time = datetime.time(hour=0, minute=0) - delta = datetime.timedelta( - hours=time.hour, - minutes=time.minute, - seconds=time.second, - microseconds=time.microsecond, - ) - raster_date = date - delta - rounded_date = datetime.datetime( - year=raster_date.year, - month=raster_date.month, - day=raster_date.day, - tzinfo=raster_date.tzinfo, - ) - return rounded_date + delta - - -def call_output(args, input_=None): - """Call command and return output. - - :param args: Command to execute - :type args: list - :param input_: Input to command - :type input_: bytes - """ - p = subprocess.Popen( - args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE - ) - output, err = p.communicate(input_) - if p.returncode != 0: - raise subprocess.CalledProcessError( - p.returncode, - args, - output, - err, - ) - return (output.decode("UTF-8"), err.decode("UTF-8")) - - -class Command(object): - """Repesents a system command and is used to resolve dependencies between - such commands. - - :param cmd: The command as list, one item per argument - :type cmd: list - """ - - pretend_mode = False - - def __init__(self, cmd): - self.cmd = cmd - self._requires = set() - self._provides = set() - self._finished = None - self._known_dependency_types = ( - "snapshot", - "mirror", - "repo", - "publish", - "virtual", - ) - - def get_provides(self): # pragma: no cover - """Return all provides of this command. - - :rtype: set()""" - return self._provides - - def append(self, argument): - """Append additional arguments to the command. - - :param argument: String argument to append - :type argument: str""" - assert str(argument) == argument - self.cmd.append(argument) - - def require(self, type_, identifier): - """Require a dependency for this command. - - :param type_: Type or category of the dependency ie. snapshot - :type type_: str - :param identifier: Identifier of the dependency for example name of a - snapshot - :type identifier: usually str - """ - assert type_ in ( - self._known_dependency_types - + ("any",) - + SystemStateReader.known_dependency_types - ) - self._requires.add((type_, str(identifier))) - - def provide(self, type_, identifier): - """Provide a dependency for this command. - - :param type_: Type or category of the dependency ie. snapshot - :type type_: str - :param identifier: Identifier of the dependency for example name of a - snapshot - :type identifier: usually str - """ - assert type_ in self._known_dependency_types - self._provides.add((type_, str(identifier))) - - def execute(self): - """Execute the command. Return the return value of the command. - - :rtype: integer""" - if self._finished is not None: # pragma: no cover - return self._finished - - if not Command.pretend_mode: - lg.debug("Running command: %s", " ".join(self.cmd)) - self._finished = subprocess.check_call(self.cmd) - else: - lg.info("Pretending to run command: %s", " ".join(self.cmd)) - - return self._finished - - def repr_cmd(self): - """Return repr of the command. - - :rtype: str""" - return repr(self.cmd) - - def __hash__(self): - """Hash of the command. 
- - :rtype: integer""" - return freeze.recursive_hash((self.cmd, self._requires, self._provides)) - - def __eq__(self, other): - """Equalitity based on the hash, might collide... hmm""" - return self.__hash__() == other.__hash__() - - def __repr__(self): - return "Command<%s requires %s, provides %s>\n" % ( - self.repr_cmd(), - ", ".join([repr(x) for x in self._requires]), - ", ".join([repr(x) for x in self._provides]), - ) - - @staticmethod - def command_list_to_digraph(commands): # pragma: no cover - """Generate dot source for a digraph - suitable for generating - diagrams. - - The requires and provides from the commands build nodes, the commands - themselves act as connectors. - - :param commands: The commands to draw a diagram with - :type commands: list - """ - - nodes = set() - edges = set() - - def result_node(type_, name): - """Get the dot representation of a result node.""" - return ( - '"%s %s" [shape=ellipse]' % (type_, name), - '"%s %s"' % (type_, name), - ) - - def cmd_node(command): - """Get the dot representation of a command node.""" - return ( - '"%s" [shape=box]' % command.repr_cmd(), - '"%s"' % command.repr_cmd(), - ) - - for cmd in commands: - if cmd is None: - continue - - cmd_spec, cmd_identifier = cmd_node(cmd) - nodes.add(cmd_spec) - - for type_, name in cmd._requires: - spec, identifier = result_node(type_, name) - nodes.add(spec) - edges.add((identifier, cmd_identifier)) - - for type_, name in cmd._provides: - spec, identifier = result_node(type_, name) - nodes.add(spec) - edges.add((cmd_identifier, identifier)) - - template = """ - digraph { - %s; - %s; - } - """ - return template % ( - ";\n".join(nodes), - ";\n".join(["%s -> %s" % edge for edge in edges]), - ) - - @staticmethod - def order_commands(commands, has_dependency_cb=lambda x: False): - """Order the commands according to the dependencies they - provide/require. - - :param commands: The commands to order - :type commands: list - :param has_dependency_cb: Optional callback the resolve external - dependencies - :type has_dependency_cb: function""" - - commands = set([c for c in commands if c is not None]) - - lg.debug("Ordering commands: %s", [str(cmd) for cmd in commands]) - - have_requirements = collections.defaultdict(lambda: 0) - required_number = collections.defaultdict(lambda: 0) - scheduled = [] - - for cmd in commands: - for provide in cmd._provides: - required_number[provide] += 1 - - something_changed = True - while something_changed: - something_changed = False - - for cmd in commands: - if cmd in scheduled: - continue - - can_schedule = True - for req in cmd._requires: - if have_requirements[req] < required_number[req]: - lg.debug( - "%s: dependency %s not fulfilled, " - "checking aptly state" % (cmd, req) - ) - # No command providing our dependency.. Let's see if - # it's already otherwise fulfilled - if not has_dependency_cb(req): - lg.debug( - "%s: dependency %s not " - "in aptly state either" % (cmd, req) - ) - can_schedule = False - # Break out of the requirements loop, as the - # command cannot be scheduled anyway. 
- break - - if can_schedule: - lg.debug("%s: all dependencies fulfilled" % cmd) - scheduled.append(cmd) - for provide in cmd._provides: - have_requirements[provide] += 1 - - something_changed = True - - unresolved = [cmd for cmd in commands if cmd not in scheduled] - - if len(unresolved) > 0: # pragma: no cover - raise ValueError( - "Commands with unresolved deps: %s" % [str(cmd) for cmd in unresolved] - ) - - # Just one last verification before we commence - scheduled_set = set([cmd for cmd in scheduled]) - incoming_set = set([cmd for cmd in commands]) - assert incoming_set == scheduled_set - - lg.info("Reordered commands: %s", [str(cmd) for cmd in scheduled]) - - return scheduled - - -class FunctionCommand(Command): - """Repesents a function command and is used to resolve dependencies between - such commands. This command executes the given function. \*args and - \*\*kwargs are passed through. - - :param func: The function to execute - :type func: callable - """ - - def __init__(self, func, *args, **kwargs): - super(FunctionCommand, self).__init__(None) - - assert hasattr(func, "__call__") - self.cmd = func - self.args = args - self.kwargs = kwargs - - def __hash__(self): - return freeze.recursive_hash( - (id(self.cmd), self.args, self.kwargs, self._requires, self._provides) - ) - - def execute(self): - """Execute the command. (Call the function).""" - if self._finished is not None: # pragma: no cover - return self._finished - - if not Command.pretend_mode: - lg.debug( - "Running code: %s(args=%s, kwargs=%s)", - self.cmd.__name__, - repr(self.args), - repr(self.kwargs), - ) - - self.cmd(*self.args, **self.kwargs) - - self._finished = True - else: # pragma: no cover - lg.info( - "Pretending to run code: %s(args=%s, kwargs=%s)", - self.repr_cmd(), - repr(self.args), - repr(self.kwargs), - ) - - return self._finished - - def repr_cmd(self): - """Return repr of the command. - - :rtype: str""" - # We need to "id" ourselves here so that multiple commands that call a - # function with the same name won't be shown as being equal. - return "%s|%s" % (self.cmd.__name__, id(self)) - - def __repr__(self): - return "FunctionCommand<%s requires %s, provides %s>\n" % ( - self.repr_cmd(), - ", ".join([repr(x) for x in self._requires]), - ", ".join([repr(x) for x in self._provides]), - ) - - -class SystemStateReader(object): - """Reads the state from aptly and gpg to find out what operations have to - be performed to reach the state defined in the yml config-file. - """ - - known_dependency_types = ("repo", "snapshot", "mirror", "gpg_key") - - def __init__(self): - self.gpg_keys = set() - self.mirrors = set() - self.repos = set() - self.snapshots = set() - self.snapshot_map = {} - self.publishes = set() - self.publish_map = {} - - def _extract_sources(self, data): - """ - Extract sources from data. 
- - Data needs to be in following format: - Name: test-snap - Description: some description - Sources: - test-snap-base [snapshot] - """ - entered_sources = False - sources = [] - for line in data.split("\n"): - # source line need to start with two spaces - if entered_sources and line[0:2] != " ": - break - - if entered_sources: - sources.append(line) - - if line == "Sources:": - entered_sources = True - - return sources - - def read(self): - """Reads all available system states.""" - self.read_gpg() - self.read_repos() - self.read_mirror() - self.read_snapshot() - self.read_snapshot_map() - self.read_publishes() - self.read_publish_map() - - def read_gpg(self): - """Read all trusted keys in gpg.""" - self.gpg_keys = set() - cmd = [ - "gpg", - "--no-default-keyring", - "--keyring", - "trustedkeys.gpg", - "--list-keys", - "--with-colons", - ] - data, _ = call_output(cmd) - lg.debug("GPG returned: %s", data) - for line in data.split("\n"): - field = line.split(":") - if field[0] in ("pub", "sub"): - key = field[4] - key_short = key[8:] - self.gpg_keys.add(key) - self.gpg_keys.add(key_short) - - def read_publish_map(self): - """Create a publish map. publish -> snapshots""" - self.publish_map = {} - # match example: main: test-snapshot [snapshot] - re_snap = re.compile(r"\s+[\w\d-]+\:\s([\w\d-]+)\s\[snapshot\]") - for publish in self.publishes: - prefix, dist = publish.split(" ") - data, _ = call_output(["aptly", "publish", "show", dist, prefix]) - - sources = self._extract_sources(data) - matches = [re_snap.match(source) for source in sources] - snapshots = [match.group(1) for match in matches if match] - self.publish_map[publish] = set(snapshots) - - lg.debug("Joined snapshots and publishes: %s", self.publish_map) - - def read_snapshot_map(self): - """Create a snapshot map. snapshot -> snapshots. This is also called - merge-tree.""" - self.snapshot_map = {} - # match example: test-snapshot [snapshot] - re_snap = re.compile(r"\s+([\w\d-]+)\s\[snapshot\]") - for snapshot_outer in self.snapshots: - data, _ = call_output(["aptly", "snapshot", "show", snapshot_outer]) - sources = self._extract_sources(data) - matches = [re_snap.match(source) for source in sources] - snapshots = [match.group(1) for match in matches if match] - self.snapshot_map[snapshot_outer] = set(snapshots) - - lg.debug("Joined snapshots with self(snapshots): %s", self.snapshot_map) - - def read_publishes(self): - """Read all available publishes.""" - self.publishes = set() - self.read_aptly_list("publish", self.publishes) - - def read_repos(self): - """Read all available repos.""" - self.repos = set() - self.read_aptly_list("repo", self.repos) - - def read_mirror(self): - """Read all available mirrors.""" - self.mirrors = set() - self.read_aptly_list("mirror", self.mirrors) - - def read_snapshot(self): - """Read all available snapshots.""" - self.snapshots = set() - self.read_aptly_list("snapshot", self.snapshots) - - def read_aptly_list(self, type_, list_): - """Generic method to read lists from aptly. - - :param type_: The type of list to read ie. snapshot - :type type_: str - :param list_: Read into this list - :param list_: list""" - data, _ = call_output(["aptly", type_, "list", "-raw"]) - lg.debug("Aptly returned %s: %s", type_, data) - for line in data.split("\n"): - clean_line = line.strip() - if clean_line: - list_.add(clean_line) - - def has_dependency(self, dependency): - """Check system state dependencies. 
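
`_extract_sources` walks exactly the indented block under `Sources:`. A small sketch with illustrative (not captured) `aptly ... show` output:

```python
# Sketch: parsing the "Sources:" block; the sample data is illustrative.
from pyaptly.legacy import SystemStateReader

data = (
    "Name: test-snap\n"
    "Description: some description\n"
    "Sources:\n"
    "  test-snap-base [snapshot]\n"
    "Packages: 42\n"
)
reader = SystemStateReader()
# Lines of the indented block are returned verbatim, including indent.
assert reader._extract_sources(data) == ["  test-snap-base [snapshot]"]
```
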
- - :param dependency: The dependency to check - :type dependency: list""" - type_, name = dependency - - if type_ == "repo": # pragma: no cover - return name in self.repos - if type_ == "mirror": # pragma: no cover - return name in self.mirrors - elif type_ == "snapshot": - return name in self.snapshots # pragma: no cover - elif type_ == "gpg_key": # pragma: no cover - return name in self.gpg_keys # Not needed ATM - elif type_ == "virtual": - # virtual dependencies can never be resolved by the - # system state reader - they are used for internal - # ordering only - return False - else: - raise ValueError("Unknown dependency to resolve: %s" % str(dependency)) - - -state = SystemStateReader() - - -def main(argv=None): - """Called by command-line, defines parsers and executes commands. - - :param argv: Arguments usually taken from sys.argv - :type argv: list""" - global _logging_setup - if not argv: # pragma: no cover - argv = sys.argv[1:] - parser = argparse.ArgumentParser(description="Manage aptly") - parser.add_argument( - "--config", - "-c", - help="Yaml config file defining mirrors and snapshots", - type=str, - required=True, - ) - parser.add_argument( - "--debug", - "-d", - help="Enable debug output", - action="store_true", - ) - parser.add_argument( - "--pretend", - "-p", - help="Do not do anything, just print out what WOULD be done", - action="store_true", - ) - subparsers = parser.add_subparsers() - mirror_parser = subparsers.add_parser("mirror", help="manage aptly mirrors") - mirror_parser.set_defaults(func=mirror) - mirror_parser.add_argument("task", type=str, choices=["create", "update"]) - mirror_parser.add_argument("mirror_name", type=str, nargs="?", default="all") - snap_parser = subparsers.add_parser("snapshot", help="manage aptly snapshots") - snap_parser.set_defaults(func=snapshot) - snap_parser.add_argument("task", type=str, choices=["create", "update"]) - snap_parser.add_argument("snapshot_name", type=str, nargs="?", default="all") - publish_parser = subparsers.add_parser( - "publish", help="manage aptly publish endpoints" - ) - publish_parser.set_defaults(func=publish) - publish_parser.add_argument("task", type=str, choices=["create", "update"]) - publish_parser.add_argument("publish_name", type=str, nargs="?", default="all") - repo_parser = subparsers.add_parser("repo", help="manage aptly repositories") - repo_parser.set_defaults(func=repo) - repo_parser.add_argument("task", type=str, choices=["create"]) - repo_parser.add_argument("repo_name", type=str, nargs="?", default="all") - - args = parser.parse_args(argv) - root = logging.getLogger() - formatter = logging.Formatter( - "%(asctime)s - %(name)s - %(levelname)s - %(message)s" - ) - if not _logging_setup: # noqa - handler = logging.StreamHandler(sys.stderr) - handler.setFormatter(formatter) - root.addHandler(handler) - handler.setLevel(logging.CRITICAL) - if args.debug: - root.setLevel(logging.DEBUG) - handler.setLevel(logging.DEBUG) - if args.pretend: - Command.pretend_mode = True - else: - Command.pretend_mode = False - - _logging_setup = True # noqa - lg.debug("Args: %s", vars(args)) - - with codecs.open(args.config, "r", encoding="UTF-8") as cfgfile: - cfg = yaml.load(cfgfile, Loader=yaml.FullLoader) - state.read() - - # run function for selected subparser - args.func(cfg, args) - - -day_of_week_map = { - "mon": 1, - "tue": 2, - "wed": 3, - "thu": 4, - "fri": 5, - "sat": 6, - "sun": 7, -} - - -def expand_timestamped_name(name, timestamp_config, date=None): - """Expand a timestamped name using round_timestamp. 
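
A sketch of driving `main()` programmatically in pretend mode. Note that `main()` still calls `state.read()`, which shells out to `gpg` and `aptly`, so this only works where both binaries are set up (e.g. inside the repo's docker test container); the config is a hypothetical minimal example:

```python
# Sketch: pretend mode via the entry point (requires gpg/aptly on PATH).
import tempfile

from pyaptly.legacy import main

with tempfile.NamedTemporaryFile("w", suffix=".yml") as f:
    f.write("repo:\n  myrepo:\n    distribution: stable\n")
    f.flush()
    # -p only logs what WOULD be done instead of touching aptly state.
    main(["-c", f.name, "-p", "repo", "create"])
```
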
- - :param timestamp_config: Contains the recurrence specification for the - timestamp. See :func:`round_timestamp` - :type timestamp_config: dict - :param date: The date to expand the timestamp with. - :type date: :py:class:`datetime.datetime`""" - if "%T" not in name: - return name - timestamp = round_timestamp(timestamp_config, date) - return name.replace("%T", timestamp.strftime("%Y%m%dT%H%MZ")) - - -def round_timestamp(timestamp_config, date=None): - """Round the given name by adding a timestamp. - - The contents of the timestamp is configured by the given timestamp_config - dict, which MUST contain a "time" key, and MAY contain a "repeat-weekly" - key. - - If the key "repeat-weekly" is given, it is expected to contain a - three-letter weekday name (mon, tue, thu, ...). The "time" key is expected - to be a 24 hour HH:MM time specification. - - Timestamps are rounded down to the nearest time as specified (which may be - on the previous day. If repeat-weekly is specified, it is rounded down - (back in time) to the given weekday.) - - The name parameter may be a simple string. If it contains the marker "%T", - then this placeholder will be replaced by the timestamp. If it does NOT - contain that marker, then nothing happens (and the timestamp_config is not - evaluated at all) - - If a datetime object is given as third parameter, then it is used to - generate the timestamp. If it is omitted, the current date/time is used. - - Example: - >>> expand_timestamped_name( - ... 'foo-%T', - ... {'timestamp': {'time': '00:00'}}, - ... datetime.datetime(2015,10,7, 15,30) # A Wednesday - ... ) - 'foo-20151007T0000Z' - - >>> expand_timestamped_name( - ... 'foo-%T', - ... {'timestamp': {'time': '00:00', 'repeat-weekly': 'mon'}}, - ... datetime.datetime(2015,10,8, 15,30) # A Thursday - ... ) - 'foo-20151005T0000Z' - - >>> expand_timestamped_name( - ... 'foo', # No %T placeholder, timestamp info is ignored - ... {'timestamp': {'time': '00:00', 'repeat-weekly': 'mon'}}, - ... datetime.datetime(2015,10,8, 15,30) - ... ) - 'foo' - - :param timestamp_config: Contains the recurrence specification for the - timestamp. - :type timestamp_config: dict - :param date: The date to expand the timestamp with. - :type date: :py:class:`datetime.datetime` - """ - timestamp_info = timestamp_config.get("timestamp", timestamp_config) - config_time = timestamp_info.get("time", "FAIL") - if config_time == "FAIL": # pragma: no cover - raise ValueError( - "Timestamp config has no valid time entry: %s" % str(timestamp_config) - ) - - config_repeat_weekly = timestamp_info.get("repeat-weekly", None) - - hour, minute = [int(x) for x in config_time.split(":")][:2] - - if date is None: - date = datetime.datetime.now() - - if config_repeat_weekly is not None: - day_of_week = day_of_week_map.get(config_repeat_weekly.lower()) - - timestamp = date_round_weekly( - date, day_of_week, datetime.time(hour=hour, minute=minute) - ) - else: - timestamp = date_round_daily(date, datetime.time(hour=hour, minute=minute)) - return timestamp - - -def unit_or_list_to_list(thingy): - """Ensures that a yml entry is always a list. Used to allow lists and - single units in the yml file. - - :param thingy: The data to ensure it is a list - :type thingy: list, tuple or other""" - if isinstance(thingy, list) or isinstance(thingy, tuple): - return list(thingy) - else: - return [thingy] - - -def publish_cmd_create(cfg, publish_name, publish_config, ignore_existing=False): - """Creates a publish command with its dependencies to be ordered and - executed later. 
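
Complementing the weekly doctests above, a sketch of the daily case of `round_timestamp`: rounding floors to the configured time of day, which may land on the previous day:

```python
# Sketch: 15:30 is before the 23:00 roundpoint, so we floor to yesterday.
import datetime

from pyaptly.legacy import round_timestamp

ts = round_timestamp(
    {"timestamp": {"time": "23:00"}},
    datetime.datetime(2015, 10, 7, 15, 30),
)
assert ts == datetime.datetime(2015, 10, 6, 23, 0)
```
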
- - :param cfg: pyaptly config - :type cfg: dict - :param publish_name: Name of the publish to create - :type publish_name: str - :param publish_config: Configuration of the publish from the yml file. - :type publish_config: dict""" - publish_fullname = "%s %s" % (publish_name, publish_config["distribution"]) - if publish_fullname in state.publishes and not ignore_existing: - # Nothing to do, publish already created - return - - publish_cmd = ["aptly", "publish"] - options = [] - source_args = [] - endpoint_args = [publish_name] - - has_source = False - num_sources = 0 - - for conf, conf_value in publish_config.items(): - if conf == "skip-contents": - if conf_value: - options.append("-skip-contents=true") - elif conf == "architectures": # pragma: no cover - options.append( - "-architectures=%s" % ",".join(unit_or_list_to_list(conf_value)) - ) - elif conf == "components": - components = unit_or_list_to_list(conf_value) - options.append("-component=%s" % ",".join(components)) - elif conf == "label": # pragma: no cover - options.append("-label=%s" % conf_value) - elif conf == "origin": # pragma: no cover - options.append("-origin=%s" % conf_value) - - elif conf == "distribution": - options.append("-distribution=%s" % conf_value) - - elif conf == "gpg-key": - options.append("-gpg-key=%s" % conf_value) - elif conf == "automatic-update": - # Ignored here - pass - elif conf == "snapshots": - if has_source: # pragma: no cover - raise ValueError( - "Multiple sources for publish %s %s" - % (publish_name, publish_config) - ) - has_source = True - snapshots = unit_or_list_to_list(conf_value) - source_args.append("snapshot") - source_args.extend( - [snapshot_spec_to_name(cfg, conf_value) for conf_value in snapshots] - ) - - num_sources = len(snapshots) - - elif conf == "repo": - if has_source: # pragma: no cover - raise ValueError( - "Multiple sources for publish %s %s" - % (publish_name, publish_config) - ) - has_source = True - source_args = ["repo", conf_value] - num_sources = 1 - elif conf == "publish": - if has_source: # pragma: no cover - raise ValueError( - "Multiple sources for publish %s %s" - % (publish_name, publish_config) - ) - has_source = True - conf_value = " ".join(conf_value.split("/")) - source_args.append("snapshot") - try: - sources = state.publish_map[conf_value] - except KeyError: - lg.critical( - ( - "Creating %s has been deferred, please call publish " - "create again" - ) - % publish_name - ) - return - source_args.extend(sources) - num_sources = len(sources) - else: # pragma: no cover - raise ValueError( - "Don't know how to handle publish config entry %s in %s" - % ( - conf, - publish_name, - ) - ) - assert has_source - assert len(components) == num_sources - - return Command(publish_cmd + options + source_args + endpoint_args) - - -def clone_snapshot(origin, destination): - """Creates a clone snapshot command with dependencies to be ordered and - executed later. - - :param origin: The snapshot to clone - :type origin: str - :param destination: The new name of the snapshot - :type destination: str""" - cmd = Command(["aptly", "snapshot", "merge", destination, origin]) - cmd.provide("snapshot", destination) - cmd.require("snapshot", origin) - return cmd - - -def publish_cmd_update(cfg, publish_name, publish_config, ignore_existing=False): - """Creates a publish command with its dependencies to be ordered and - executed later. 
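
Before moving on: a sketch of what `publish_cmd_create` (above) builds for a repo-based publish. Nothing runs until the ordered commands execute; names and the gpg key are hypothetical, and a fresh process is assumed so the module-level `state` is still empty:

```python
# Sketch: publish_cmd_create only *builds* a Command.
from pyaptly import legacy

publish_config = {
    "distribution": "stable",
    "components": "main",
    "repo": "myrepo",
    "gpg-key": "7FAC5991",
}
cmd = legacy.publish_cmd_create({}, "ubuntu", publish_config)
print(cmd.cmd)
# ['aptly', 'publish', '-distribution=stable', '-component=main',
#  '-gpg-key=7FAC5991', 'repo', 'myrepo', 'ubuntu']
```
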
- - :param cfg: pyaptly config - :type cfg: dict - :param publish_name: Name of the publish to update - :type publish_name: str - :param publish_config: Configuration of the publish from the yml file. - :type publish_config: dict""" - - publish_cmd = ["aptly", "publish"] - options = [] - args = [publish_config["distribution"], publish_name] - - if "skip-contents" in publish_config and publish_config["skip-contents"]: - options.append("-skip-contents=true") - - if "repo" in publish_config: - publish_cmd.append("update") - return Command(publish_cmd + options + args) - - publish_fullname = "%s %s" % (publish_name, publish_config["distribution"]) - current_snapshots = state.publish_map[publish_fullname] - if "snapshots" in publish_config: - snapshots_config = publish_config["snapshots"] - new_snapshots = [snapshot_spec_to_name(cfg, snap) for snap in snapshots_config] - elif "publish" in publish_config: - conf_value = publish_config["publish"] - snapshots_config = [] - ref_publish_name, distribution = conf_value.split(" ") - for publish in cfg["publish"][ref_publish_name]: - if publish["distribution"] == distribution: - snapshots_config.extend(publish["snapshots"]) - break - new_snapshots = list(state.publish_map[conf_value]) - else: # pragma: no cover - raise ValueError( - "No snapshot references configured in publish %s" % publish_name - ) - - if set(new_snapshots) == set(current_snapshots) and not ignore_existing: - # Already pointing to the newest snapshot, nothing to do - return - components = unit_or_list_to_list(publish_config["components"]) - - for snap in snapshots_config: - # snap may be a plain name or a dict.. - if hasattr(snap, "items"): - # Dict mode - only here can we even have an archive option - archive = snap.get("archive-on-update", None) - - if archive: - # Replace any timestamp placeholder with the current - # date/time. Note that this is NOT rounded, as we want to - # know exactly when the archival happened. - archive = archive.replace( - "%T", format_timestamp(datetime.datetime.now()) - ) - if archive in state.snapshots: # pragma: no cover - continue - prefix_to_search = re.sub("%T$", "", snap["name"]) - - current_snapshot = [ - snap_name - for snap_name in sorted(current_snapshots, key=lambda x: -len(x)) - if snap_name.startswith(prefix_to_search) - ][0] - - clone_snapshot(current_snapshot, archive).execute() - - publish_cmd.append("switch") - options.append("-component=%s" % ",".join(components)) - - if "skip-contents" in publish_config and publish_config["skip-contents"]: - options.append("-skip-contents=true") - - return Command(publish_cmd + options + args + new_snapshots) - - -def repo_cmd_create(cfg, repo_name, repo_config): - """Create a repo create command to be ordered and executed later. - - :param cfg: pyaptly config - :type cfg: dict - :param repo_name: Name of the repo to create - :type repo_name: str - :param repo_config: Configuration of the repo from the yml file. 
- :type repo_config: dict""" - - if repo_name in state.repos: # pragma: no cover - # Nothing to do, repo already created - return - - repo_cmd = ["aptly", "repo"] - options = [] - endpoint_args = ["create", repo_name] - - for conf, conf_value in repo_config.items(): - if conf == "architectures": - options.append( - "-architectures=%s" % ",".join(unit_or_list_to_list(conf_value)) - ) - elif conf == "component": - components = unit_or_list_to_list(conf_value) - options.append("-component=%s" % ",".join(components)) - elif conf == "comment": # pragma: no cover - options.append("-comment=%s" % conf_value) - elif conf == "distribution": - options.append("-distribution=%s" % conf_value) - else: # pragma: no cover - raise ValueError( - "Don't know how to handle repo config entry %s in %s" - % ( - conf, - repo_name, - ) - ) - - return Command(repo_cmd + options + endpoint_args) - - -def repo(cfg, args): - """Creates repository commands, orders and executes them. - - :param cfg: The configuration yml as dict - :type cfg: dict - :param args: The command-line arguments read with :py:mod:`argparse` - :type args: namespace""" - lg.debug("Repositories to create: %s", cfg["repo"]) - - repo_cmds = { - "create": repo_cmd_create, - } - - cmd_repo = repo_cmds[args.task] - - if args.repo_name == "all": - commands = [ - cmd_repo(cfg, repo_name, repo_conf) - for repo_name, repo_conf in cfg["repo"].items() - ] - - for cmd in Command.order_commands(commands, state.has_dependency): - cmd.execute() - - else: - if args.repo_name in cfg["repo"]: - commands = [cmd_repo(cfg, args.repo_name, cfg["repo"][args.repo_name])] - for cmd in Command.order_commands(commands, state.has_dependency): - cmd.execute() - else: - raise ValueError( - "Requested publish is not defined in config file: %s" % (args.repo_name) - ) - - -def publish(cfg, args): - """Creates publish commands, orders and executes them. - - :param cfg: The configuration yml as dict - :type cfg: dict - :param args: The command-line arguments read with :py:mod:`argparse` - :type args: namespace""" - lg.debug("Publishes to create / update: %s", cfg["publish"]) - - # aptly publish snapshot -components ... -architectures ... -distribution - # ... -origin Ubuntu trusty-stable ubuntu/stable - - publish_cmds = { - "create": publish_cmd_create, - "update": publish_cmd_update, - } - - cmd_publish = publish_cmds[args.task] - - if args.publish_name == "all": - commands = [ - cmd_publish(cfg, publish_name, publish_conf_entry) - for publish_name, publish_conf in cfg["publish"].items() - for publish_conf_entry in publish_conf - if publish_conf_entry.get("automatic-update", "false") is True - ] - - for cmd in Command.order_commands(commands, state.has_dependency): - cmd.execute() - - else: - if args.publish_name in cfg["publish"]: - commands = [ - cmd_publish(cfg, args.publish_name, publish_conf_entry) - for publish_conf_entry in cfg["publish"][args.publish_name] - ] - for cmd in Command.order_commands(commands, state.has_dependency): - cmd.execute() - else: - raise ValueError( - "Requested publish is not defined in config file: %s" - % (args.publish_name) - ) - - -def snapshot(cfg, args): - """Creates snapshot commands, orders and executes them. 
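
Similarly, a sketch of the flag mapping in `repo_cmd_create` above (hypothetical names; the asserted flag order relies on dict insertion order, i.e. Python 3.7+):

```python
# Sketch: config keys become aptly flags, endpoint args come last.
from pyaptly import legacy

cmd = legacy.repo_cmd_create(
    {}, "myrepo", {"distribution": "stable", "component": "main"}
)
assert cmd.cmd == [
    "aptly", "repo",
    "-distribution=stable", "-component=main",
    "create", "myrepo",
]
```
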
- - :param cfg: The configuration yml as dict - :type cfg: dict - :param args: The command-line arguments read with :py:mod:`argparse` - :type args: namespace""" - lg.debug("Snapshots to create: %s", cfg["snapshot"].keys()) - - snapshot_cmds = { - "create": cmd_snapshot_create, - "update": cmd_snapshot_update, - } - - cmd_snapshot = snapshot_cmds[args.task] - - if args.snapshot_name == "all": - commands = [ - cmd - for snapshot_name, snapshot_config in cfg["snapshot"].items() - for cmd in cmd_snapshot(cfg, snapshot_name, snapshot_config) - ] - - if args.debug: # pragma: no cover - dot_file = "/tmp/commands.dot" - with codecs.open(dot_file, "w", "UTF-8") as fh_dot: - fh_dot.write(Command.command_list_to_digraph(commands)) - lg.info("Wrote command dependency tree graph to %s", dot_file) - - if len(commands) > 0: - for cmd in Command.order_commands(commands, state.has_dependency): - cmd.execute() - - else: - if args.snapshot_name in cfg["snapshot"]: - commands = cmd_snapshot( - cfg, args.snapshot_name, cfg["snapshot"][args.snapshot_name] - ) - - if len(commands) > 0: - for cmd in Command.order_commands(commands, state.has_dependency): - cmd.execute() - - else: - raise ValueError( - "Requested snapshot is not defined in config file: %s" - % (args.snapshot_name) - ) - - -def format_timestamp(timestamp): - """Wrapper for strftime, to ensure we're all using the same format. - - :param timestamp: The timestamp to format - :type timestamp: :py:class:`datetime.datetime`""" - return timestamp.strftime("%Y%m%dT%H%MZ") - - -back_reference_map = { - "current": 0, - "previous": 1, -} - - -def snapshot_spec_to_name(cfg, snapshot): - """Converts a given snapshot short spec to a name. - - A short spec is a value that may either be a string or a dict. - - If it's a string, everything is fine and we just use that as - a snapshot name. - - However if it's a dict, we assume it has the following keys: - - * name: template for the snapshot - * timestamp: information on how to generate the timestamp. - - For further information regarding the timestamp's data structure, - consult the documentation of expand_timestamped_name(). - - :param cfg: Complete yaml config - :type cfg: dict - :param snapshot: Config of the snapshot - :type snapshot: dict - """ - delta = datetime.timedelta(seconds=1) - if hasattr(snapshot, "items"): - name = snapshot["name"] - if "timestamp" not in snapshot: - return name - - ts = snapshot["timestamp"] - back_ref = back_reference_map.get(ts) - if back_ref is None: - back_ref = int(ts) - reference = cfg["snapshot"][name] - - timestamp = datetime.datetime.now() - for _ in range(back_ref + 1): - timestamp = round_timestamp(reference["timestamp"], timestamp) - timestamp -= delta - - timestamp += delta - return name.replace("%T", format_timestamp(timestamp)) - else: # pragma: no cover - return snapshot - - -def dependents_of_snapshot(snapshot_name): - """Yield a flat list of dependents from the current state. - - :rtype: generator""" - for dependent in state.snapshot_map.get(snapshot_name, []): - yield dependent - for sub in dependents_of_snapshot(dependent): # pragma: no cover - yield dependent - - -def rotate_snapshot(cfg, snapshot_name): - """Creates a command to rotate a snapshot in order to be able to update a - current publish. 
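
A sketch of `snapshot_spec_to_name` (above) resolving a back reference: `"previous"` walks one rounding step further back than `"current"` (hypothetical config):

```python
# Sketch: resolving a back reference against the snapshot's own
# timestamp recurrence.
from pyaptly import legacy

cfg = {"snapshot": {"web-%T": {"timestamp": {"time": "00:00"}}}}
spec = {"name": "web-%T", "timestamp": "previous"}
print(legacy.snapshot_spec_to_name(cfg, spec))
# e.g. 'web-20240101T0000Z': the midnight before the most recent one
```
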
- - :param cfg: pyaptly config - :type cfg: dict - :param snapshot_name: the snapshot to rotate - :type snapshot_name: str""" - rotated_name = cfg["snapshot"][snapshot_name].get( - "rotate_via", - "%s-rotated-%s" % (snapshot_name, format_timestamp(datetime.datetime.now())), - ) - - # First, verify that our snapshot environment is in a sane state. - # Fixing the environment is not currently our task. - - if rotated_name in state.snapshots: # pragma: no cover - raise Exception( - "Cannot update snapshot %s - rotated name %s already exists" - % (snapshot_name, rotated_name) - ) - - cmd = Command(["aptly", "snapshot", "rename", snapshot_name, rotated_name]) - - cmd.provide("virtual", rotated_name) - return cmd - - -def cmd_snapshot_update(cfg, snapshot_name, snapshot_config): - """Create commands to update all rotating snapshots. - - :param cfg: pyaptly config - :type cfg: dict - :param snapshot_name: Name of the snapshot to update/rotate - :type snapshot_name: str - :param snapshot_config: Configuration of the snapshot from the yml file. - :type snapshot_config: dict""" - - # To update a snapshot, we need to do roughly the following steps: - # 1) Rename the current snapshot and all snapshots that depend on it - # 2) Create new version of the snapshot and all snapshots that depend on it - # 3) Recreate all renamed snapshots - # 4) Update / switch-over publishes - # 5) Remove the rotated temporary snapshots - - if "%T" in snapshot_name: # pragma: no cover - # Timestamped snapshots are never rotated by design. - return [] - - affected_snapshots = [snapshot_name] - affected_snapshots.extend(list(dependents_of_snapshot(snapshot_name))) - - # TODO: rotated snapshots should be identified by configuration option, not - # just by "not being timestamped - - rename_cmds = [rotate_snapshot(cfg, snap) for snap in affected_snapshots] - - # The "intermediate" command causes the state reader to refresh. At the - # same time, it provides a collection point for dependency handling. - intermediate = FunctionCommand(state.read) - intermediate.provide("virtual", "all-snapshots-rotated") - - for cmd in rename_cmds: - # Ensure that our "intermediate" pseudo command comes after all - # the rename commands, by ensuring it depends on all their "virtual" - # provided items. - cmd_vprovides = [ - provide for ptype, provide in cmd.get_provides() if ptype == "virtual" - ] - for provide in cmd_vprovides: - intermediate.require("virtual", provide) - - # Same as before - create a focal point to "collect" dependencies - # after the snapshots have been rebuilt. Also reload state once again - intermediate2 = FunctionCommand(state.read) - intermediate2.provide("virtual", "all-snapshots-rebuilt") - - create_cmds = [] - for _ in affected_snapshots: - # Well.. there's normally just one, but since we need interface - # consistency, cmd_snapshot_create() returns a list. And since it - # returns a list, we may just as well future-proof it and loop instead - # of assuming it's going to be a single entry (and fail horribly if - # this assumption changes in the future). 
- for create_cmd in cmd_snapshot_create( - cfg, snapshot_name, cfg["snapshot"][snapshot_name], ignore_existing=True - ): - # enforce cmd to run after the refresh, and thus also - # after all the renames - create_cmd.require("virtual", "all-snapshots-rotated") - - # Evil hack - we must do the dependencies ourselves, to avoid - # getting a circular graph - create_cmd._requires = set( - [ - (type_, req) - for type_, req in create_cmd._requires - if type_ != "snapshot" - ] - ) - - create_cmd.provide("virtual", "readyness-for-%s" % snapshot_name) - for follower in dependents_of_snapshot(snapshot_name): - create_cmd.require("virtual", "readyness-for-%s" % follower) - - # "Focal point" - make intermediate2 run after all the commands - # that re-create the snapshots - create_cmd.provide("virtual", "rebuilt-%s" % snapshot_name) - intermediate2.require("virtual", "rebuilt-%s" % snapshot_name) - - create_cmds.append(create_cmd) - - # At this point, snapshots have been renamed, then recreated. - # After each of the steps, the system state has been re-read. - # So now, we're left with updating the publishes. - - def is_publish_affected(name, publish): - if "%s %s" % (name, publish["distribution"]) in state.publishes: - try: - for snap in publish["snapshots"]: - snap_name = snapshot_spec_to_name(cfg, snap) - if snap_name in affected_snapshots: - return True - except KeyError: # pragma: no cover - lg.debug( - ( - "Publish endpoint %s is not affected because it has no " - "snapshots defined" - ) - % name - ) - return False - return False - - if "publish" in cfg: - all_publish_commands = [ - publish_cmd_update( - cfg, publish_name, publish_conf_entry, ignore_existing=True - ) - for publish_name, publish_conf in cfg["publish"].items() - for publish_conf_entry in publish_conf - if publish_conf_entry.get("automatic-update", "false") is True - if is_publish_affected(publish_name, publish_conf_entry) - ] - else: - all_publish_commands = [] - - republish_cmds = [c for c in all_publish_commands if c] - - # Ensure that the republish commands run AFTER the snapshots are rebuilt - for cmd in republish_cmds: - cmd.require("virtual", "all-snapshots-rebuilt") - - # TODO: - # - We need to cleanup all the rotated snapshots after the publishes are - # rebuilt - # - Filter publishes, so only the non-timestamped publishes are rebuilt - - return rename_cmds + create_cmds + republish_cmds + [intermediate, intermediate2] - - -def cmd_snapshot_create(cfg, snapshot_name, snapshot_config, ignore_existing=False): - """Create a snapshot create command to be ordered and executed later. - - :param cfg: pyaptly config - :type cfg: dict - :param snapshot_name: Name of the snapshot to create - :type snapshot_name: str - :param snapshot_config: Configuration of the snapshot from the yml file. - :type snapshot_config: dict - :param ignore_existing: Optional, defaults to False. 
If set to True, still - return a command object even if the requested - snapshot already exists - :type ignore_existing: dict - - :rtype: Command - """ - - # TODO: extract possible timestamp component - # and generate *actual* snapshot name - - snapshot_name = expand_timestamped_name(snapshot_name, snapshot_config) - - if snapshot_name in state.snapshots and not ignore_existing: - return [] - - default_aptly_cmd = ["aptly", "snapshot", "create"] - default_aptly_cmd.append(snapshot_name) - default_aptly_cmd.append("from") - - if "mirror" in snapshot_config: - cmd = Command(default_aptly_cmd + ["mirror", snapshot_config["mirror"]]) - cmd.provide("snapshot", snapshot_name) - cmd.require("mirror", snapshot_config["mirror"]) - return [cmd] - - elif "repo" in snapshot_config: - cmd = Command(default_aptly_cmd + ["repo", snapshot_config["repo"]]) - cmd.provide("snapshot", snapshot_name) - cmd.require("repo", snapshot_config["repo"]) - return [cmd] - - elif "filter" in snapshot_config: - cmd = Command( - [ - "aptly", - "snapshot", - "filter", - snapshot_spec_to_name(cfg, snapshot_config["filter"]["source"]), - snapshot_name, - snapshot_config["filter"]["query"], - ] - ) - cmd.provide("snapshot", snapshot_name) - cmd.require( - "snapshot", snapshot_spec_to_name(cfg, snapshot_config["filter"]["source"]) - ) - return [cmd] - - elif "merge" in snapshot_config: - cmd = Command( - [ - "aptly", - "snapshot", - "merge", - snapshot_name, - ] - ) - cmd.provide("snapshot", snapshot_name) - - for source in snapshot_config["merge"]: - source_name = snapshot_spec_to_name(cfg, source) - cmd.append(source_name) - cmd.require("snapshot", source_name) - - return [cmd] - - else: # pragma: no cover - raise ValueError("Don't know how to handle snapshot config" % (snapshot_config)) - - -def mirror(cfg, args): - """Creates mirror commands, orders and executes them. - - :param cfg: The configuration yml as dict - :type cfg: dict - :param args: The command-line arguments read with :py:mod:`argparse` - :type args: namespace""" - lg.debug("Mirrors to create: %s", cfg["mirror"]) - - mirror_cmds = { - "create": cmd_mirror_create, - "update": cmd_mirror_update, - } - - cmd_mirror = mirror_cmds[args.task] - - if args.mirror_name == "all": - for mirror_name, mirror_config in cfg["mirror"].items(): - cmd_mirror(cfg, mirror_name, mirror_config) - else: - if args.mirror_name in cfg["mirror"]: - cmd_mirror(cfg, args.mirror_name, cfg["mirror"][args.mirror_name]) - else: - raise ValueError( - "Requested mirror is not defined in config file: %s" - % (args.mirror_name) - ) - - -def add_gpg_keys(mirror_config): - """Uses the gpg command-line to download and add gpg keys needed to create - mirrors. 
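
A sketch of the wiring `cmd_snapshot_create` (above) produces for a mirror-based snapshot (hypothetical names; the private `_requires` is inspected only for illustration):

```python
# Sketch: the command provides the timestamp-expanded snapshot and
# requires its mirror; nothing is executed here.
from pyaptly import legacy

cmds = legacy.cmd_snapshot_create(
    {}, "snap-%T", {"mirror": "m1", "timestamp": {"time": "00:00"}}
)
cmd = cmds[0]
print(cmd.cmd)  # ['aptly', 'snapshot', 'create', 'snap-<ts>', 'from', 'mirror', 'm1']
assert ("mirror", "m1") in cmd._requires
```
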
- - :param mirror_config: The configuration yml as dict - :type mirror_config: dict - """ - keys_urls = {} - if "gpg-keys" in mirror_config: - keys = unit_or_list_to_list(mirror_config["gpg-keys"]) - if "gpg-urls" in mirror_config: - urls = unit_or_list_to_list(mirror_config["gpg-urls"]) - urls_len = len(urls) - for x in range(len(keys)): - if x < urls_len: - url = urls[x] - else: # pragma: no cover - url = None - keys_urls[keys[x]] = url - else: - for key in keys: - keys_urls[key] = None - - for key in keys_urls.keys(): - if key in state.gpg_keys: - continue - try: - key_command = [ - "gpg", - "--no-default-keyring", - "--keyring", - "trustedkeys.gpg", - "--keyserver", - "hkp://127.0.0.1:8080", - "--recv-keys", - key, - ] - lg.debug("Adding gpg key with call: %s", key_command) - subprocess.check_call(key_command) - except subprocess.CalledProcessError: # pragma: no cover - url = keys_urls[key] - if url: - key_command = ( - "curl %s | " - "gpg --no-default-keyring --keyring trustedkeys.gpg " - "--import" - ) % url - subprocess.check_call(["bash", "-c", key_command]) - else: - raise - state.read_gpg() - - -def cmd_mirror_create(cfg, mirror_name, mirror_config): - """Create a mirror create command to be ordered and executed later. - - :param cfg: The configuration yml as dict - :type cfg: dict - :param mirror_name: Name of the mirror to create - :type mirror_name: str - :param mirror_config: Configuration of the snapshot from the yml file. - :type mirror_config: dict""" - - if mirror_name in state.mirrors: # pragma: no cover - return - - add_gpg_keys(mirror_config) - aptly_cmd = ["aptly", "mirror", "create"] - - if "sources" in mirror_config and mirror_config["sources"]: - aptly_cmd.append("-with-sources") - else: - aptly_cmd.append("-with-sources=false") - - if "udeb" in mirror_config and mirror_config["udeb"]: - aptly_cmd.append("-with-udebs") - - if "architectures" in mirror_config: - aptly_cmd.append( - "-architectures={0}".format( - ",".join(unit_or_list_to_list(mirror_config["architectures"])) - ) - ) - - aptly_cmd.append(mirror_name) - aptly_cmd.append(mirror_config["archive"]) - aptly_cmd.append(mirror_config["distribution"]) - aptly_cmd.extend(unit_or_list_to_list(mirror_config["components"])) - - lg.debug("Running command: %s", " ".join(aptly_cmd)) - subprocess.check_call(aptly_cmd) - - -def cmd_mirror_update(cfg, mirror_name, mirror_config): - """Create a mirror update command to be ordered and executed later. - - :param cfg: pyaptly config - :type cfg: dict - :param mirror_name: Name of the mirror to create - :type mirror_name: str - :param mirror_config: Configuration of the snapshot from the yml file. 
- :type mirror_config: dict""" - if mirror_name not in state.mirrors: # pragma: no cover - raise Exception("Mirror not created yet") - add_gpg_keys(mirror_config) - aptly_cmd = ["aptly", "mirror", "update"] - if "max-tries" in mirror_config: - aptly_cmd.append("-max-tries=%d" % mirror_config["max-tries"]) - - aptly_cmd.append(mirror_name) - lg.debug("Running command: %s", " ".join(aptly_cmd)) - subprocess.check_call(aptly_cmd) - - -if __name__ == "__main__": # pragma: no cover - main() +from pyaptly.legacy import SystemStateReader, main # type: ignore # TODO # noqa: F401 diff --git a/pyaptly/aptly_test.py b/pyaptly/aptly_test.py index 6557740..949e5dd 100644 --- a/pyaptly/aptly_test.py +++ b/pyaptly/aptly_test.py @@ -1,3 +1,6 @@ +# type: ignore # TODO +# flake8: noqa # TODO + """Testing pyaptly""" import contextlib import logging @@ -7,8 +10,13 @@ import pytest import testfixtures -from pyaptly import (Command, SystemStateReader, call_output, main, - snapshot_spec_to_name) +from pyaptly.legacy import ( + Command, + SystemStateReader, + call_output, + main, + snapshot_spec_to_name, +) from . import test @@ -25,7 +33,7 @@ def mock_subprocess(): """Mock subprocess that no commands are executed""" call = mock.patch("subprocess.check_call") - output = mock.patch("pyaptly.call_output") + output = mock.patch("pyaptly.legacy.call_output") yield (call.start(), output.start()) call.stop() output.stop() diff --git a/pyaptly/cli.py b/pyaptly/cli.py index 1dd34f9..caa3a61 100644 --- a/pyaptly/cli.py +++ b/pyaptly/cli.py @@ -1,3 +1,4 @@ +"""python-click based command line interface for pyaptly.""" from pathlib import Path import click @@ -14,7 +15,8 @@ default=False, help="Add default values to fields if missing", ) -def cli(debug): +def cli(debug: bool): + """Show basic command group.""" from pyaptly import util util._DEBUG = debug @@ -22,7 +24,8 @@ def cli(debug): @cli.command(help="run legacy command parser") def legacy(): - from pyaptly import main + """Run legacy pyaptly cli.""" + from pyaptly import main # type: ignore # TODO main() @@ -43,7 +46,7 @@ def legacy(): type=click.Path( file_okay=True, dir_okay=False, - exists=None, + exists=False, writable=True, path_type=Path, ), @@ -55,7 +58,8 @@ def legacy(): default=False, help="Add default values to fields if missing", ) -def yaml_to_toml(yaml_path, toml_path, add_defaults): +def yaml_to_toml(yaml_path: Path, toml_path: Path, add_defaults: bool): + """Convert pyaptly config files from yaml to toml.""" from pyaptly import config_file config_file.yaml_to_toml( diff --git a/pyaptly/config_file.py b/pyaptly/config_file.py index fce38f6..8bad8ab 100644 --- a/pyaptly/config_file.py +++ b/pyaptly/config_file.py @@ -1,3 +1,4 @@ +"""Handling pyaptly config-files.""" from pathlib import Path import toml @@ -5,15 +6,20 @@ def yaml_to_toml(yaml_path: Path, toml_path: Path, *, add_defaults: bool = False): + """Convert pyaptly config files from yaml to toml. + + Setting `add_defaults=True` will set common default during conversion. 
+ """ with yaml_path.open("r", encoding="UTF-8") as yf: with toml_path.open("w", encoding="UTF-8") as tf: config = yaml.safe_load(yf) if add_defaults: - add_dehfault_to_config(config) + add_default_to_config(config) toml.dump(config, tf) -def add_dehfault_to_config(config): +def add_default_to_config(config): + """Set common default in config if the fields are missing.""" if "mirror" in config: for mirror in config["mirror"].values(): if "components" not in mirror: diff --git a/pyaptly/conftest.py b/pyaptly/conftest.py index a73f4b6..8a135ee 100644 --- a/pyaptly/conftest.py +++ b/pyaptly/conftest.py @@ -1,3 +1,5 @@ +"""pytest conftest.""" + import json import logging import os @@ -14,6 +16,7 @@ @pytest.fixture() def debug_mode(): + """Enable debug mode, set log-level and log run() commands.""" from pyaptly import util level = logging.root.getEffectiveLevel() @@ -28,15 +31,27 @@ def debug_mode(): logging.root.setLevel(level) +@pytest.fixture() +def test_path(): + """Return test_base as test_path to find assets for testing.""" + yield test_base + + @pytest.fixture() def environment(debug_mode): + """Get a test environment. + + - An aptly config and directory + - An gnupg directory + - web-server and key-server are always running in the docker-container + """ tempdir_obj = tempfile.TemporaryDirectory() tempdir = Path(tempdir_obj.name).absolute() aptly = tempdir / "aptly" aptly.mkdir(parents=True) config = {"rootDir": str(aptly)} - if aptly_conf.exists(): + if aptly_conf.exists(): # pragma: no cover aptly_conf.unlink() with aptly_conf.open("w") as f: json.dump(config, f) @@ -56,6 +71,16 @@ def environment(debug_mode): @pytest.fixture() def config(request): + """Get a config. + + Can be configured with: + + ```python + @pytest.mark.parametrize("config", ["mirror-extra.toml"], indirect=True) + def test_mirror_create(environment, config, caplog): + ... + ``` + """ config_file = test_base / request.param with config_file.open("r", encoding="UTF-8") as f: config = toml.load(f) diff --git a/pyaptly/dateround_test.py b/pyaptly/dateround_test.py index c12aed1..755ec32 100644 --- a/pyaptly/dateround_test.py +++ b/pyaptly/dateround_test.py @@ -1,25 +1,34 @@ +# type: ignore # TODO +# flake8: noqa # TODO + """Dateround tests""" import datetime import os.path import sys -from . import (date_round_daily, date_round_weekly, iso_to_gregorian, # noqa - snapshot_spec_to_name, test, time_delta_helper, time_remove_tz) +from . 
import test +from .legacy import ( + date_round_daily, + date_round_weekly, + iso_to_gregorian, + snapshot_spec_to_name, + time_delta_helper, + time_remove_tz, +) -_test_base = os.path.dirname( - os.path.abspath(__file__) -).encode("UTF-8") +_test_base = os.path.dirname(os.path.abspath(__file__)).encode("UTF-8") if not sys.version_info < (2, 7): # pragma: no cover from hypothesis import given # noqa - from hypothesis.strategies import datetimes, times # noqa from hypothesis.strategies import integers # noqa + from hypothesis.strategies import datetimes, times # noqa if sys.version_info < (2, 7): # pragma: no cover import mock + given = mock.MagicMock() # noqa datetimes = mock.MagicMock() # noqa times = mock.MagicMock() # noqa @@ -31,26 +40,27 @@ def test_is_to_gregorian(date): # pragma: no cover """Test if a roundtrip of isoclander() -> iso_to_gregorian() is correct""" iso_tuple = date.isocalendar() - new_date = iso_to_gregorian(*iso_tuple) - assert date.year == new_date.year + new_date = iso_to_gregorian(*iso_tuple) + assert date.year == new_date.year assert date.month == new_date.month - assert date.day == new_date.day + assert date.day == new_date.day @test.hypothesis_min_ver @given( datetimes(min_value=datetime.datetime(year=2, month=1, day=1)), integers(min_value=1, max_value=7), - times()) + times(), +) def test_round_weekly(date, day_of_week, time): # pragma: no cover """Test if the round function rounds the expected delta""" - time = time_remove_tz(time) - round_date = date_round_weekly(date, day_of_week, time) - date_time = datetime.time( - hour = date.hour, - minute = date.minute, - second = date.second, - microsecond = date.microsecond, + time = time_remove_tz(time) + round_date = date_round_weekly(date, day_of_week, time) + date_time = datetime.time( + hour=date.hour, + minute=date.minute, + second=date.second, + microsecond=date.microsecond, ) # double round assert round_date == date_round_weekly(round_date, day_of_week, time) @@ -64,20 +74,18 @@ def test_round_weekly(date, day_of_week, time): # pragma: no cover # Never round more than 7 days assert date - round_date < datetime.timedelta(days=7) # Check if rounded on given time and day - assert round_date.hour == time.hour - assert round_date.minute == time.minute - assert round_date.second == time.second - assert round_date.microsecond == time.microsecond + assert round_date.hour == time.hour + assert round_date.minute == time.minute + assert round_date.second == time.second + assert round_date.microsecond == time.microsecond assert round_date.isoweekday() == day_of_week # Expected delta date_delta = date - round_date - date_day_time_delta = ( - time_delta_helper(date_time) + - datetime.timedelta(days=date.weekday()) + date_day_time_delta = time_delta_helper(date_time) + datetime.timedelta( + days=date.weekday() ) - given_day_time_delta = ( - time_delta_helper(time) + - datetime.timedelta(days=day_of_week - 1) + given_day_time_delta = time_delta_helper(time) + datetime.timedelta( + days=day_of_week - 1 ) delta = date_day_time_delta - given_day_time_delta if date_day_time_delta > given_day_time_delta: @@ -90,26 +98,23 @@ def test_round_weekly(date, day_of_week, time): # pragma: no cover def test_weekly_examples(): """Test if the round function rounds at the given day and time""" - date = datetime.datetime( - year = 2015, - month = 11, - day = 3, - hour = 22, - minute = 59, - ) - time = datetime.time( - hour = 23, - minute = 0 + date = datetime.datetime( + year=2015, + month=11, + day=3, + hour=22, + minute=59, ) + time = 
datetime.time(hour=23, minute=0) day_of_week = 2 rounded = date_round_weekly(date, day_of_week, time) assert datetime.datetime(2015, 10, 27, 23, 0) == rounded - date = datetime.datetime( - year = 2015, - month = 11, - day = 3, - hour = 23, - minute = 1, + date = datetime.datetime( + year=2015, + month=11, + day=3, + hour=23, + minute=1, ) rounded = date_round_weekly(date, day_of_week, time) assert datetime.datetime(2015, 11, 3, 23, 0) == rounded @@ -119,13 +124,13 @@ def test_weekly_examples(): @given(datetimes(), times()) def test_round_daily(date, time): # pragma: no cover """Test if the round function rounds the expected delta""" - time = time_remove_tz(time) - round_date = date_round_daily(date, time) - date_time = datetime.time( - hour = date.hour, - minute = date.minute, - second = date.second, - microsecond = date.microsecond, + time = time_remove_tz(time) + round_date = date_round_daily(date, time) + date_time = datetime.time( + hour=date.hour, + minute=date.minute, + second=date.second, + microsecond=date.microsecond, ) # double round assert round_date == date_round_daily(round_date, time) @@ -138,9 +143,9 @@ def test_round_daily(date, time): # pragma: no cover # Never round more than 24 hours assert date - round_date < datetime.timedelta(hours=24) # Check if rounded on given time - assert round_date.hour == time.hour - assert round_date.minute == time.minute - assert round_date.second == time.second + assert round_date.hour == time.hour + assert round_date.minute == time.minute + assert round_date.second == time.second assert round_date.microsecond == time.microsecond # Expected delta date_delta = date - round_date @@ -155,55 +160,50 @@ def test_round_daily(date, time): # pragma: no cover def test_daily_examples(): """Test if the round function rounds at the given time""" - date = datetime.datetime( - year = 2015, - month = 10, - day = 1, - hour = 12, - minute = 34, - ) - time = datetime.time( - hour = 23, - minute = 00 + date = datetime.datetime( + year=2015, + month=10, + day=1, + hour=12, + minute=34, ) + time = datetime.time(hour=23, minute=00) rounded = date_round_daily(date, time) assert datetime.datetime(2015, 9, 30, 23, 0) == rounded - time = datetime.time( - hour = 11, - minute = 00 - ) + time = datetime.time(hour=11, minute=00) rounded = date_round_daily(date, time) assert datetime.datetime(2015, 10, 1, 11, 0) == rounded - date = datetime.datetime( - year = 2015, - month = 10, - day = 1, - hour = 10, - minute = 59, + date = datetime.datetime( + year=2015, + month=10, + day=1, + hour=10, + minute=59, ) rounded = date_round_daily(date, time) assert datetime.datetime(2015, 9, 30, 11, 0) == rounded - date = datetime.datetime( - year = 2015, - month = 10, - day = 1, - hour = 11, - minute = 1, + date = datetime.datetime( + year=2015, + month=10, + day=1, + hour=11, + minute=1, ) rounded = date_round_daily(date, time) assert datetime.datetime(2015, 10, 1, 11, 0) == rounded def test_snapshot_spec_to_name(): - with test.clean_and_config(os.path.join( + with test.clean_and_config( + os.path.join( _test_base, b"publish-previous.yml", - )) as (tyml, config): - - snaps = tyml['snapshot']['superfake-%T']['merge'] + ) + ) as (tyml, config): + snaps = tyml["snapshot"]["superfake-%T"]["merge"] rounded1 = snapshot_spec_to_name(tyml, snaps[0]) rounded2 = snapshot_spec_to_name(tyml, snaps[1]) - assert rounded1 == 'fakerepo01-20121009T0000Z' - assert rounded2 == 'fakerepo02-20121006T0000Z' + assert rounded1 == "fakerepo01-20121009T0000Z" + assert rounded2 == "fakerepo02-20121006T0000Z" diff 
--git a/pyaptly/graph_test.py b/pyaptly/graph_test.py
index 64a082b..2f08255 100644
--- a/pyaptly/graph_test.py
+++ b/pyaptly/graph_test.py
@@ -1,8 +1,13 @@
+# type: ignore # TODO
+# flake8: noqa # TODO
+
 """Testing dependency graphs"""
 import random
 import sys
 
-from . import Command, FunctionCommand, test
+from pyaptly.legacy import Command, FunctionCommand
+
+from . import test
 
 if not sys.version_info < (2, 7):  # pragma: no cover
     from hypothesis import strategies as st
diff --git a/pyaptly/helpers_test.py b/pyaptly/helpers_test.py
index f7db724..bb77f22 100644
--- a/pyaptly/helpers_test.py
+++ b/pyaptly/helpers_test.py
@@ -1,15 +1,18 @@
+# type: ignore # TODO
+# flake8: noqa # TODO
+
 """Testing testing helper functions"""
 import subprocess
 
-from pyaptly import Command, SystemStateReader, call_output
+from pyaptly.legacy import Command, SystemStateReader, call_output
 
 
 def test_call_output_error():
     """Test if call_output raises errors correctly"""
     args = [
-        'bash',
-        '-c',
-        'exit 42',
+        "bash",
+        "-c",
+        "exit 42",
     ]
     error = False
     try:
@@ -22,7 +25,7 @@ def test_call_output_error():
 
 def test_command_dependency_fail():
     """Test if bad dependencies fail correctly."""
-    a = Command(['ls'])
+    a = Command(["ls"])
     error = False
     try:
         a.require("turbo", "banana")
@@ -35,7 +38,7 @@ def test_dependency_callback_file():
     """Test if bad dependencies fail correctly."""
     state = SystemStateReader()
     try:
-        state.has_dependency(['turbo', 'banana'])
+        state.has_dependency(["turbo", "banana"])
     except ValueError as e:
         assert "Unknown dependency" in e.args[0]
         error = True
diff --git a/pyaptly/legacy.py b/pyaptly/legacy.py
new file mode 100755
index 0000000..2e4bdec
--- /dev/null
+++ b/pyaptly/legacy.py
@@ -0,0 +1,1673 @@
+#!/usr/bin/env python2
+# type: ignore # TODO
+# flake8: noqa # TODO
+
+"""Aptly mirror/snapshot management automation."""
+import argparse
+import codecs
+import collections
+import datetime
+import logging
+import os
+import re
+import subprocess
+import sys
+
+import freeze
+import six
+import yaml
+
+_logging_setup = False
+
+if six.PY2:
+    environb = os.environ  # pragma: no cover
+else:
+    environb = os.environb  # pragma: no cover
+
+
+def init_hypothesis():
+    """Initialize hypothesis profile if hypothesis is available"""
+    try:  # pragma: no cover
+        if b"HYPOTHESIS_PROFILE" in environb:
+            from hypothesis import Settings
+
+            Settings.register_profile("ci", Settings(max_examples=10000))
+            Settings.load_profile(os.getenv("HYPOTHESIS_PROFILE", "default"))
+    except (ImportError, AttributeError):  # pragma: no cover
+        pass
+
+
+def get_logger():
+    """Get the logger.
+
+    :rtype: logging.Logger"""
+    return logging.getLogger("pyaptly")
+
+
+lg = get_logger()
+init_hypothesis()
+
+
+def iso_first_week_start(iso_year, tzinfo=None):
+    """The gregorian calendar date of the first day of the given ISO year.
+
+    :param iso_year: Year to find the date of the first week.
+    :type iso_year: int"""
+    fourth_jan = datetime.datetime(iso_year, 1, 4, tzinfo=tzinfo)
+    delta = datetime.timedelta(fourth_jan.isoweekday() - 1)
+    return fourth_jan - delta
+
+
+def iso_to_gregorian(iso_year, iso_week, iso_day, tzinfo=None):
+    """Gregorian calendar date for the given ISO year, week and day.
+
+    :param iso_year: ISO year
+    :type iso_year: int
+    :param iso_week: ISO week
+    :type iso_week: int
+    :param iso_day: ISO day
+    :type iso_day: int"""
+    year_start = iso_first_week_start(iso_year, tzinfo)
+    return year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1)
+
+
+def time_remove_tz(time):
+    """Convert a :py:class:`datetime.time` to a :py:class:`datetime.time`
+    without tzinfo.
+
+    :param time: Time to convert
+    :type time: :py:class:`datetime.time`
+    :rtype: :py:class:`datetime.time`
+    """
+    return datetime.time(
+        hour=time.hour,
+        minute=time.minute,
+        second=time.second,
+        microsecond=time.microsecond,
+    )
+
+
+def time_delta_helper(time):  # pragma: no cover
+    """Convert a :py:class:`datetime.time` to a :py:class:`datetime.datetime`
+    to calculate deltas.
+
+    :param time: Time to convert
+    :type time: :py:class:`datetime.time`
+    :rtype: :py:class:`datetime.datetime`
+    """
+    return datetime.datetime(
+        year=2000,
+        month=1,
+        day=1,
+        hour=time.hour,
+        minute=time.minute,
+        second=time.second,
+        microsecond=time.microsecond,
+        tzinfo=time.tzinfo,
+    )
+
+
+def date_round_weekly(date, day_of_week=1, time=None):
+    """Round a datetime back (floor) to a given day of the week.
+
+    THIS FUNCTION IGNORES THE TZINFO OF TIME and assumes it is the same tz as
+    the date.
+
+    :param date: Datetime object to round
+    :type date: :py:class:`datetime.datetime`
+    :param day_of_week: ISO day of week: monday is 1 and sunday is 7
+    :type day_of_week: int
+    :param time: Roundpoint in the day (tzinfo ignored)
+    :type time: :py:class:`datetime.time`
+    :rtype: :py:class:`datetime.datetime`"""
+    if time:
+        time = time_remove_tz(time)
+    else:  # pragma: no cover
+        time = datetime.time(hour=0, minute=0)
+
+    delta = datetime.timedelta(
+        days=day_of_week - 1,
+        hours=time.hour,
+        minutes=time.minute,
+        seconds=time.second,
+        microseconds=time.microsecond,
+    )
+    raster_date = date - delta
+    iso = raster_date.isocalendar()
+    rounded_date = iso_to_gregorian(iso[0], iso[1], 1, date.tzinfo)
+    return rounded_date + delta
+
+
+def date_round_daily(date, time=None):
+    """Round a datetime back (floor) to the given roundpoint (time) in the day.
+
+    THIS FUNCTION IGNORES THE TZINFO OF TIME and assumes it is the same tz as
+    the date.
+
+    :param date: Datetime object to round
+    :type date: :py:class:`datetime.datetime`
+    :param time: Roundpoint in the day (tzinfo ignored)
+    :type time: :py:class:`datetime.time`
+    :rtype: :py:class:`datetime.datetime`"""
+    if time:
+        time = time_remove_tz(time)
+    else:  # pragma: no cover
+        time = datetime.time(hour=0, minute=0)
+    delta = datetime.timedelta(
+        hours=time.hour,
+        minutes=time.minute,
+        seconds=time.second,
+        microseconds=time.microsecond,
+    )
+    raster_date = date - delta
+    rounded_date = datetime.datetime(
+        year=raster_date.year,
+        month=raster_date.month,
+        day=raster_date.day,
+        tzinfo=raster_date.tzinfo,
+    )
+    return rounded_date + delta
+
+
+def call_output(args, input_=None):
+    """Call command and return output.
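
The helper introduced here decodes both streams and raises on a non-zero exit status; a quick sketch (the failing case needs a POSIX shell):

```python
# Sketch: decoded stdout/stderr on success, CalledProcessError on failure.
import subprocess

from pyaptly.legacy import call_output

out, _ = call_output(["echo", "hi"])
assert out == "hi\n"

try:
    call_output(["bash", "-c", "exit 42"])
except subprocess.CalledProcessError as e:
    assert e.returncode == 42
```
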
+ + :param args: Command to execute + :type args: list + :param input_: Input to command + :type input_: bytes + """ + p = subprocess.Popen( + args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + output, err = p.communicate(input_) + if p.returncode != 0: + raise subprocess.CalledProcessError( + p.returncode, + args, + output, + err, + ) + return (output.decode("UTF-8"), err.decode("UTF-8")) + + +class Command(object): + """Repesents a system command and is used to resolve dependencies between + such commands. + + :param cmd: The command as list, one item per argument + :type cmd: list + """ + + pretend_mode = False + + def __init__(self, cmd): + self.cmd = cmd + self._requires = set() + self._provides = set() + self._finished = None + self._known_dependency_types = ( + "snapshot", + "mirror", + "repo", + "publish", + "virtual", + ) + + def get_provides(self): # pragma: no cover + """Return all provides of this command. + + :rtype: set()""" + return self._provides + + def append(self, argument): + """Append additional arguments to the command. + + :param argument: String argument to append + :type argument: str""" + assert str(argument) == argument + self.cmd.append(argument) + + def require(self, type_, identifier): + """Require a dependency for this command. + + :param type_: Type or category of the dependency ie. snapshot + :type type_: str + :param identifier: Identifier of the dependency for example name of a + snapshot + :type identifier: usually str + """ + assert type_ in ( + self._known_dependency_types + + ("any",) + + SystemStateReader.known_dependency_types + ) + self._requires.add((type_, str(identifier))) + + def provide(self, type_, identifier): + """Provide a dependency for this command. + + :param type_: Type or category of the dependency ie. snapshot + :type type_: str + :param identifier: Identifier of the dependency for example name of a + snapshot + :type identifier: usually str + """ + assert type_ in self._known_dependency_types + self._provides.add((type_, str(identifier))) + + def execute(self): + """Execute the command. Return the return value of the command. + + :rtype: integer""" + if self._finished is not None: # pragma: no cover + return self._finished + + if not Command.pretend_mode: + lg.debug("Running command: %s", " ".join(self.cmd)) + self._finished = subprocess.check_call(self.cmd) + else: + lg.info("Pretending to run command: %s", " ".join(self.cmd)) + + return self._finished + + def repr_cmd(self): + """Return repr of the command. + + :rtype: str""" + return repr(self.cmd) + + def __hash__(self): + """Hash of the command. + + :rtype: integer""" + return freeze.recursive_hash((self.cmd, self._requires, self._provides)) + + def __eq__(self, other): + """Equalitity based on the hash, might collide... hmm""" + return self.__hash__() == other.__hash__() + + def __repr__(self): + return "Command<%s requires %s, provides %s>\n" % ( + self.repr_cmd(), + ", ".join([repr(x) for x in self._requires]), + ", ".join([repr(x) for x in self._provides]), + ) + + @staticmethod + def command_list_to_digraph(commands): # pragma: no cover + """Generate dot source for a digraph - suitable for generating + diagrams. + + The requires and provides from the commands build nodes, the commands + themselves act as connectors. 
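
A sketch of the pretend mode shown above: with the class-wide flag set, `execute()` only logs the command line through the module logger and never spawns a process:

```python
# Sketch: pretend_mode short-circuits execution; _finished stays None.
from pyaptly.legacy import Command

Command.pretend_mode = True
try:
    cmd = Command(["aptly", "mirror", "update", "m1"])
    assert cmd.execute() is None  # nothing was spawned, only logged
finally:
    Command.pretend_mode = False
```
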
+ + :param commands: The commands to draw a diagram with + :type commands: list + """ + + nodes = set() + edges = set() + + def result_node(type_, name): + """Get the dot representation of a result node.""" + return ( + '"%s %s" [shape=ellipse]' % (type_, name), + '"%s %s"' % (type_, name), + ) + + def cmd_node(command): + """Get the dot representation of a command node.""" + return ( + '"%s" [shape=box]' % command.repr_cmd(), + '"%s"' % command.repr_cmd(), + ) + + for cmd in commands: + if cmd is None: + continue + + cmd_spec, cmd_identifier = cmd_node(cmd) + nodes.add(cmd_spec) + + for type_, name in cmd._requires: + spec, identifier = result_node(type_, name) + nodes.add(spec) + edges.add((identifier, cmd_identifier)) + + for type_, name in cmd._provides: + spec, identifier = result_node(type_, name) + nodes.add(spec) + edges.add((cmd_identifier, identifier)) + + template = """ + digraph { + %s; + %s; + } + """ + return template % ( + ";\n".join(nodes), + ";\n".join(["%s -> %s" % edge for edge in edges]), + ) + + @staticmethod + def order_commands(commands, has_dependency_cb=lambda x: False): + """Order the commands according to the dependencies they + provide/require. + + :param commands: The commands to order + :type commands: list + :param has_dependency_cb: Optional callback the resolve external + dependencies + :type has_dependency_cb: function""" + + commands = set([c for c in commands if c is not None]) + + lg.debug("Ordering commands: %s", [str(cmd) for cmd in commands]) + + have_requirements = collections.defaultdict(lambda: 0) + required_number = collections.defaultdict(lambda: 0) + scheduled = [] + + for cmd in commands: + for provide in cmd._provides: + required_number[provide] += 1 + + something_changed = True + while something_changed: + something_changed = False + + for cmd in commands: + if cmd in scheduled: + continue + + can_schedule = True + for req in cmd._requires: + if have_requirements[req] < required_number[req]: + lg.debug( + "%s: dependency %s not fulfilled, " + "checking aptly state" % (cmd, req) + ) + # No command providing our dependency.. Let's see if + # it's already otherwise fulfilled + if not has_dependency_cb(req): + lg.debug( + "%s: dependency %s not " + "in aptly state either" % (cmd, req) + ) + can_schedule = False + # Break out of the requirements loop, as the + # command cannot be scheduled anyway. + break + + if can_schedule: + lg.debug("%s: all dependencies fulfilled" % cmd) + scheduled.append(cmd) + for provide in cmd._provides: + have_requirements[provide] += 1 + + something_changed = True + + unresolved = [cmd for cmd in commands if cmd not in scheduled] + + if len(unresolved) > 0: # pragma: no cover + raise ValueError( + "Commands with unresolved deps: %s" % [str(cmd) for cmd in unresolved] + ) + + # Just one last verification before we commence + scheduled_set = set([cmd for cmd in scheduled]) + incoming_set = set([cmd for cmd in commands]) + assert incoming_set == scheduled_set + + lg.info("Reordered commands: %s", [str(cmd) for cmd in scheduled]) + + return scheduled + + +class FunctionCommand(Command): + """Repesents a function command and is used to resolve dependencies between + such commands. This command executes the given function. \*args and + \*\*kwargs are passed through. 
+
+    :param func: The function to execute
+    :type func: callable
+    """
+
+    def __init__(self, func, *args, **kwargs):
+        super(FunctionCommand, self).__init__(None)
+
+        assert hasattr(func, "__call__")
+        self.cmd = func
+        self.args = args
+        self.kwargs = kwargs
+
+    def __hash__(self):
+        return freeze.recursive_hash(
+            (id(self.cmd), self.args, self.kwargs, self._requires, self._provides)
+        )
+
+    def execute(self):
+        """Execute the command. (Call the function)."""
+        if self._finished is not None:  # pragma: no cover
+            return self._finished
+
+        if not Command.pretend_mode:
+            lg.debug(
+                "Running code: %s(args=%s, kwargs=%s)",
+                self.cmd.__name__,
+                repr(self.args),
+                repr(self.kwargs),
+            )
+
+            self.cmd(*self.args, **self.kwargs)
+
+            self._finished = True
+        else:  # pragma: no cover
+            lg.info(
+                "Pretending to run code: %s(args=%s, kwargs=%s)",
+                self.repr_cmd(),
+                repr(self.args),
+                repr(self.kwargs),
+            )
+
+        return self._finished
+
+    def repr_cmd(self):
+        """Return repr of the command.
+
+        :rtype: str"""
+        # We need to "id" ourselves here so that multiple commands that call a
+        # function with the same name won't be shown as being equal.
+        return "%s|%s" % (self.cmd.__name__, id(self))
+
+    def __repr__(self):
+        return "FunctionCommand<%s requires %s, provides %s>\n" % (
+            self.repr_cmd(),
+            ", ".join([repr(x) for x in self._requires]),
+            ", ".join([repr(x) for x in self._provides]),
+        )
+
+
+class SystemStateReader(object):
+    """Reads the state from aptly and gpg to find out what operations have to
+    be performed to reach the state defined in the yml config-file.
+    """
+
+    known_dependency_types = ("repo", "snapshot", "mirror", "gpg_key")
+
+    def __init__(self):
+        self.gpg_keys = set()
+        self.mirrors = set()
+        self.repos = set()
+        self.snapshots = set()
+        self.snapshot_map = {}
+        self.publishes = set()
+        self.publish_map = {}
+
+    def _extract_sources(self, data):
+        """
+        Extract sources from data.
+
+        Data needs to be in the following format:
+
+        Name: test-snap
+        Description: some description
+        Sources:
+          test-snap-base [snapshot]
+        """
+        entered_sources = False
+        sources = []
+        for line in data.split("\n"):
+            # source lines need to start with two spaces
+            if entered_sources and line[0:2] != "  ":
+                break
+
+            if entered_sources:
+                sources.append(line)
+
+            if line == "Sources:":
+                entered_sources = True
+
+        return sources
+
+    def read(self):
+        """Read all available system states."""
+        self.read_gpg()
+        self.read_repos()
+        self.read_mirror()
+        self.read_snapshot()
+        self.read_snapshot_map()
+        self.read_publishes()
+        self.read_publish_map()
+
+    def read_gpg(self):
+        """Read all trusted keys in gpg."""
+        self.gpg_keys = set()
+        cmd = [
+            "gpg",
+            "--no-default-keyring",
+            "--keyring",
+            "trustedkeys.gpg",
+            "--list-keys",
+            "--with-colons",
+        ]
+        data, _ = call_output(cmd)
+        lg.debug("GPG returned: %s", data)
+        for line in data.split("\n"):
+            field = line.split(":")
+            if field[0] in ("pub", "sub"):
+                key = field[4]
+                key_short = key[8:]
+                self.gpg_keys.add(key)
+                self.gpg_keys.add(key_short)
+
+    def read_publish_map(self):
+        """Create a publish map:
+        publish -> snapshots"""
+        self.publish_map = {}
+        # match example: main: test-snapshot [snapshot]
+        re_snap = re.compile(r"\s+[\w\d-]+\:\s([\w\d-]+)\s\[snapshot\]")
+        for publish in self.publishes:
+            prefix, dist = publish.split(" ")
+            data, _ = call_output(["aptly", "publish", "show", dist, prefix])
+
+            sources = self._extract_sources(data)
+            matches = [re_snap.match(source) for source in sources]
+            snapshots = [match.group(1) for match in matches if match]
+            self.publish_map[publish] = set(snapshots)
+
+        lg.debug("Joined snapshots and publishes: %s", self.publish_map)
+
+    def read_snapshot_map(self):
+        """Create a snapshot map: snapshot -> snapshots. This is also called
+        merge-tree."""
+        self.snapshot_map = {}
+        # match example: test-snapshot [snapshot]
+        re_snap = re.compile(r"\s+([\w\d-]+)\s\[snapshot\]")
+        for snapshot_outer in self.snapshots:
+            data, _ = call_output(["aptly", "snapshot", "show", snapshot_outer])
+            sources = self._extract_sources(data)
+            matches = [re_snap.match(source) for source in sources]
+            snapshots = [match.group(1) for match in matches if match]
+            self.snapshot_map[snapshot_outer] = set(snapshots)
+
+        lg.debug("Joined snapshots with self(snapshots): %s", self.snapshot_map)
+
+    def read_publishes(self):
+        """Read all available publishes."""
+        self.publishes = set()
+        self.read_aptly_list("publish", self.publishes)
+
+    def read_repos(self):
+        """Read all available repos."""
+        self.repos = set()
+        self.read_aptly_list("repo", self.repos)
+
+    def read_mirror(self):
+        """Read all available mirrors."""
+        self.mirrors = set()
+        self.read_aptly_list("mirror", self.mirrors)
+
+    def read_snapshot(self):
+        """Read all available snapshots."""
+        self.snapshots = set()
+        self.read_aptly_list("snapshot", self.snapshots)
+
+    def read_aptly_list(self, type_, list_):
+        """Generic method to read lists from aptly.
+
+        :param type_: The type of list to read, e.g. snapshot
+        :type type_: str
+        :param list_: Read into this list
+        :type list_: list"""
+        data, _ = call_output(["aptly", type_, "list", "-raw"])
+        lg.debug("Aptly returned %s: %s", type_, data)
+        for line in data.split("\n"):
+            clean_line = line.strip()
+            if clean_line:
+                list_.add(clean_line)
+
+    def has_dependency(self, dependency):
+        """Check system state dependencies.
+
+        :param dependency: The dependency to check
+        :type dependency: list"""
+        type_, name = dependency
+
+        if type_ == "repo":  # pragma: no cover
+            return name in self.repos
+        elif type_ == "mirror":  # pragma: no cover
+            return name in self.mirrors
+        elif type_ == "snapshot":
+            return name in self.snapshots  # pragma: no cover
+        elif type_ == "gpg_key":  # pragma: no cover
+            return name in self.gpg_keys  # Not needed ATM
+        elif type_ == "virtual":
+            # virtual dependencies can never be resolved by the
+            # system state reader - they are used for internal
+            # ordering only
+            return False
+        else:
+            raise ValueError("Unknown dependency to resolve: %s" % str(dependency))
+
+
+state = SystemStateReader()
+
+
+def main(argv=None):
+    """Called by command-line, defines parsers and executes commands.
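+
+    Illustrative invocation (assumes a config file named config.yml exists):
+
+    >>> main(["-c", "config.yml", "mirror", "create"])  # doctest: +SKIP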
+
+    :param argv: Arguments usually taken from sys.argv
+    :type argv: list"""
+    global _logging_setup
+    if not argv:  # pragma: no cover
+        argv = sys.argv[1:]
+    parser = argparse.ArgumentParser(description="Manage aptly")
+    parser.add_argument(
+        "--config",
+        "-c",
+        help="Yaml config file defining mirrors and snapshots",
+        type=str,
+        required=True,
+    )
+    parser.add_argument(
+        "--debug",
+        "-d",
+        help="Enable debug output",
+        action="store_true",
+    )
+    parser.add_argument(
+        "--pretend",
+        "-p",
+        help="Do not do anything, just print out what WOULD be done",
+        action="store_true",
+    )
+    subparsers = parser.add_subparsers()
+    mirror_parser = subparsers.add_parser("mirror", help="manage aptly mirrors")
+    mirror_parser.set_defaults(func=mirror)
+    mirror_parser.add_argument("task", type=str, choices=["create", "update"])
+    mirror_parser.add_argument("mirror_name", type=str, nargs="?", default="all")
+    snap_parser = subparsers.add_parser("snapshot", help="manage aptly snapshots")
+    snap_parser.set_defaults(func=snapshot)
+    snap_parser.add_argument("task", type=str, choices=["create", "update"])
+    snap_parser.add_argument("snapshot_name", type=str, nargs="?", default="all")
+    publish_parser = subparsers.add_parser(
+        "publish", help="manage aptly publish endpoints"
+    )
+    publish_parser.set_defaults(func=publish)
+    publish_parser.add_argument("task", type=str, choices=["create", "update"])
+    publish_parser.add_argument("publish_name", type=str, nargs="?", default="all")
+    repo_parser = subparsers.add_parser("repo", help="manage aptly repositories")
+    repo_parser.set_defaults(func=repo)
+    repo_parser.add_argument("task", type=str, choices=["create"])
+    repo_parser.add_argument("repo_name", type=str, nargs="?", default="all")
+
+    args = parser.parse_args(argv)
+    root = logging.getLogger()
+    formatter = logging.Formatter(
+        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+    )
+    if not _logging_setup:  # noqa
+        handler = logging.StreamHandler(sys.stderr)
+        handler.setFormatter(formatter)
+        root.addHandler(handler)
+        handler.setLevel(logging.CRITICAL)
+        if args.debug:
+            root.setLevel(logging.DEBUG)
+            handler.setLevel(logging.DEBUG)
+    if args.pretend:
+        Command.pretend_mode = True
+    else:
+        Command.pretend_mode = False
+
+    _logging_setup = True  # noqa
+    lg.debug("Args: %s", vars(args))
+
+    with codecs.open(args.config, "r", encoding="UTF-8") as cfgfile:
+        cfg = yaml.load(cfgfile, Loader=yaml.FullLoader)
+    state.read()
+
+    # run function for selected subparser
+    args.func(cfg, args)
+
+
+day_of_week_map = {
+    "mon": 1,
+    "tue": 2,
+    "wed": 3,
+    "thu": 4,
+    "fri": 5,
+    "sat": 6,
+    "sun": 7,
+}
+
+
+def expand_timestamped_name(name, timestamp_config, date=None):
+    """Expand a timestamped name using round_timestamp.
+
+    :param name: The name to expand; the "%T" marker is replaced by the
+                 rounded timestamp
+    :type name: str
+    :param timestamp_config: Contains the recurrence specification for the
+                             timestamp. See :func:`round_timestamp`
+    :type timestamp_config: dict
+    :param date: The date to expand the timestamp with.
+    :type date: :py:class:`datetime.datetime`"""
+    if "%T" not in name:
+        return name
+    timestamp = round_timestamp(timestamp_config, date)
+    return name.replace("%T", timestamp.strftime("%Y%m%dT%H%MZ"))
+
+
+def round_timestamp(timestamp_config, date=None):
+    """Round a timestamp down according to a recurrence specification.
+
+    The contents of the timestamp is configured by the given timestamp_config
+    dict, which MUST contain a "time" key, and MAY contain a "repeat-weekly"
+    key.
+
+    If the key "repeat-weekly" is given, it is expected to contain a
+    three-letter weekday name (mon, tue, thu, ...).
The "time" key is expected + to be a 24 hour HH:MM time specification. + + Timestamps are rounded down to the nearest time as specified (which may be + on the previous day. If repeat-weekly is specified, it is rounded down + (back in time) to the given weekday.) + + The name parameter may be a simple string. If it contains the marker "%T", + then this placeholder will be replaced by the timestamp. If it does NOT + contain that marker, then nothing happens (and the timestamp_config is not + evaluated at all) + + If a datetime object is given as third parameter, then it is used to + generate the timestamp. If it is omitted, the current date/time is used. + + Example: + >>> expand_timestamped_name( + ... 'foo-%T', + ... {'timestamp': {'time': '00:00'}}, + ... datetime.datetime(2015,10,7, 15,30) # A Wednesday + ... ) + 'foo-20151007T0000Z' + + >>> expand_timestamped_name( + ... 'foo-%T', + ... {'timestamp': {'time': '00:00', 'repeat-weekly': 'mon'}}, + ... datetime.datetime(2015,10,8, 15,30) # A Thursday + ... ) + 'foo-20151005T0000Z' + + >>> expand_timestamped_name( + ... 'foo', # No %T placeholder, timestamp info is ignored + ... {'timestamp': {'time': '00:00', 'repeat-weekly': 'mon'}}, + ... datetime.datetime(2015,10,8, 15,30) + ... ) + 'foo' + + :param timestamp_config: Contains the recurrence specification for the + timestamp. + :type timestamp_config: dict + :param date: The date to expand the timestamp with. + :type date: :py:class:`datetime.datetime` + """ + timestamp_info = timestamp_config.get("timestamp", timestamp_config) + config_time = timestamp_info.get("time", "FAIL") + if config_time == "FAIL": # pragma: no cover + raise ValueError( + "Timestamp config has no valid time entry: %s" % str(timestamp_config) + ) + + config_repeat_weekly = timestamp_info.get("repeat-weekly", None) + + hour, minute = [int(x) for x in config_time.split(":")][:2] + + if date is None: + date = datetime.datetime.now() + + if config_repeat_weekly is not None: + day_of_week = day_of_week_map.get(config_repeat_weekly.lower()) + + timestamp = date_round_weekly( + date, day_of_week, datetime.time(hour=hour, minute=minute) + ) + else: + timestamp = date_round_daily(date, datetime.time(hour=hour, minute=minute)) + return timestamp + + +def unit_or_list_to_list(thingy): + """Ensures that a yml entry is always a list. Used to allow lists and + single units in the yml file. + + :param thingy: The data to ensure it is a list + :type thingy: list, tuple or other""" + if isinstance(thingy, list) or isinstance(thingy, tuple): + return list(thingy) + else: + return [thingy] + + +def publish_cmd_create(cfg, publish_name, publish_config, ignore_existing=False): + """Creates a publish command with its dependencies to be ordered and + executed later. + + :param cfg: pyaptly config + :type cfg: dict + :param publish_name: Name of the publish to create + :type publish_name: str + :param publish_config: Configuration of the publish from the yml file. 
+ :type publish_config: dict""" + publish_fullname = "%s %s" % (publish_name, publish_config["distribution"]) + if publish_fullname in state.publishes and not ignore_existing: + # Nothing to do, publish already created + return + + publish_cmd = ["aptly", "publish"] + options = [] + source_args = [] + endpoint_args = [publish_name] + + has_source = False + num_sources = 0 + + for conf, conf_value in publish_config.items(): + if conf == "skip-contents": + if conf_value: + options.append("-skip-contents=true") + elif conf == "architectures": # pragma: no cover + options.append( + "-architectures=%s" % ",".join(unit_or_list_to_list(conf_value)) + ) + elif conf == "components": + components = unit_or_list_to_list(conf_value) + options.append("-component=%s" % ",".join(components)) + elif conf == "label": # pragma: no cover + options.append("-label=%s" % conf_value) + elif conf == "origin": # pragma: no cover + options.append("-origin=%s" % conf_value) + + elif conf == "distribution": + options.append("-distribution=%s" % conf_value) + + elif conf == "gpg-key": + options.append("-gpg-key=%s" % conf_value) + elif conf == "automatic-update": + # Ignored here + pass + elif conf == "snapshots": + if has_source: # pragma: no cover + raise ValueError( + "Multiple sources for publish %s %s" + % (publish_name, publish_config) + ) + has_source = True + snapshots = unit_or_list_to_list(conf_value) + source_args.append("snapshot") + source_args.extend( + [snapshot_spec_to_name(cfg, conf_value) for conf_value in snapshots] + ) + + num_sources = len(snapshots) + + elif conf == "repo": + if has_source: # pragma: no cover + raise ValueError( + "Multiple sources for publish %s %s" + % (publish_name, publish_config) + ) + has_source = True + source_args = ["repo", conf_value] + num_sources = 1 + elif conf == "publish": + if has_source: # pragma: no cover + raise ValueError( + "Multiple sources for publish %s %s" + % (publish_name, publish_config) + ) + has_source = True + conf_value = " ".join(conf_value.split("/")) + source_args.append("snapshot") + try: + sources = state.publish_map[conf_value] + except KeyError: + lg.critical( + ( + "Creating %s has been deferred, please call publish " + "create again" + ) + % publish_name + ) + return + source_args.extend(sources) + num_sources = len(sources) + else: # pragma: no cover + raise ValueError( + "Don't know how to handle publish config entry %s in %s" + % ( + conf, + publish_name, + ) + ) + assert has_source + assert len(components) == num_sources + + return Command(publish_cmd + options + source_args + endpoint_args) + + +def clone_snapshot(origin, destination): + """Creates a clone snapshot command with dependencies to be ordered and + executed later. + + :param origin: The snapshot to clone + :type origin: str + :param destination: The new name of the snapshot + :type destination: str""" + cmd = Command(["aptly", "snapshot", "merge", destination, origin]) + cmd.provide("snapshot", destination) + cmd.require("snapshot", origin) + return cmd + + +def publish_cmd_update(cfg, publish_name, publish_config, ignore_existing=False): + """Creates a publish command with its dependencies to be ordered and + executed later. + + :param cfg: pyaptly config + :type cfg: dict + :param publish_name: Name of the publish to update + :type publish_name: str + :param publish_config: Configuration of the publish from the yml file. 
+ :type publish_config: dict""" + + publish_cmd = ["aptly", "publish"] + options = [] + args = [publish_config["distribution"], publish_name] + + if "skip-contents" in publish_config and publish_config["skip-contents"]: + options.append("-skip-contents=true") + + if "repo" in publish_config: + publish_cmd.append("update") + return Command(publish_cmd + options + args) + + publish_fullname = "%s %s" % (publish_name, publish_config["distribution"]) + current_snapshots = state.publish_map[publish_fullname] + if "snapshots" in publish_config: + snapshots_config = publish_config["snapshots"] + new_snapshots = [snapshot_spec_to_name(cfg, snap) for snap in snapshots_config] + elif "publish" in publish_config: + conf_value = publish_config["publish"] + snapshots_config = [] + ref_publish_name, distribution = conf_value.split(" ") + for publish in cfg["publish"][ref_publish_name]: + if publish["distribution"] == distribution: + snapshots_config.extend(publish["snapshots"]) + break + new_snapshots = list(state.publish_map[conf_value]) + else: # pragma: no cover + raise ValueError( + "No snapshot references configured in publish %s" % publish_name + ) + + if set(new_snapshots) == set(current_snapshots) and not ignore_existing: + # Already pointing to the newest snapshot, nothing to do + return + components = unit_or_list_to_list(publish_config["components"]) + + for snap in snapshots_config: + # snap may be a plain name or a dict.. + if hasattr(snap, "items"): + # Dict mode - only here can we even have an archive option + archive = snap.get("archive-on-update", None) + + if archive: + # Replace any timestamp placeholder with the current + # date/time. Note that this is NOT rounded, as we want to + # know exactly when the archival happened. + archive = archive.replace( + "%T", format_timestamp(datetime.datetime.now()) + ) + if archive in state.snapshots: # pragma: no cover + continue + prefix_to_search = re.sub("%T$", "", snap["name"]) + + current_snapshot = [ + snap_name + for snap_name in sorted(current_snapshots, key=lambda x: -len(x)) + if snap_name.startswith(prefix_to_search) + ][0] + + clone_snapshot(current_snapshot, archive).execute() + + publish_cmd.append("switch") + options.append("-component=%s" % ",".join(components)) + + if "skip-contents" in publish_config and publish_config["skip-contents"]: + options.append("-skip-contents=true") + + return Command(publish_cmd + options + args + new_snapshots) + + +def repo_cmd_create(cfg, repo_name, repo_config): + """Create a repo create command to be ordered and executed later. + + :param cfg: pyaptly config + :type cfg: dict + :param repo_name: Name of the repo to create + :type repo_name: str + :param repo_config: Configuration of the repo from the yml file. 
+    :type repo_config: dict"""
+
+    if repo_name in state.repos:  # pragma: no cover
+        # Nothing to do, repo already created
+        return
+
+    repo_cmd = ["aptly", "repo"]
+    options = []
+    endpoint_args = ["create", repo_name]
+
+    for conf, conf_value in repo_config.items():
+        if conf == "architectures":
+            options.append(
+                "-architectures=%s" % ",".join(unit_or_list_to_list(conf_value))
+            )
+        elif conf == "component":
+            components = unit_or_list_to_list(conf_value)
+            options.append("-component=%s" % ",".join(components))
+        elif conf == "comment":  # pragma: no cover
+            options.append("-comment=%s" % conf_value)
+        elif conf == "distribution":
+            options.append("-distribution=%s" % conf_value)
+        else:  # pragma: no cover
+            raise ValueError(
+                "Don't know how to handle repo config entry %s in %s"
+                % (
+                    conf,
+                    repo_name,
+                )
+            )
+
+    return Command(repo_cmd + options + endpoint_args)
+
+
+def repo(cfg, args):
+    """Creates repository commands, orders and executes them.
+
+    :param cfg: The configuration yml as dict
+    :type cfg: dict
+    :param args: The command-line arguments read with :py:mod:`argparse`
+    :type args: namespace"""
+    lg.debug("Repositories to create: %s", cfg["repo"])
+
+    repo_cmds = {
+        "create": repo_cmd_create,
+    }
+
+    cmd_repo = repo_cmds[args.task]
+
+    if args.repo_name == "all":
+        commands = [
+            cmd_repo(cfg, repo_name, repo_conf)
+            for repo_name, repo_conf in cfg["repo"].items()
+        ]
+
+        for cmd in Command.order_commands(commands, state.has_dependency):
+            cmd.execute()
+
+    else:
+        if args.repo_name in cfg["repo"]:
+            commands = [cmd_repo(cfg, args.repo_name, cfg["repo"][args.repo_name])]
+            for cmd in Command.order_commands(commands, state.has_dependency):
+                cmd.execute()
+        else:
+            raise ValueError(
+                "Requested repo is not defined in config file: %s" % (args.repo_name)
+            )
+
+
+def publish(cfg, args):
+    """Creates publish commands, orders and executes them.
+
+    :param cfg: The configuration yml as dict
+    :type cfg: dict
+    :param args: The command-line arguments read with :py:mod:`argparse`
+    :type args: namespace"""
+    lg.debug("Publishes to create / update: %s", cfg["publish"])
+
+    # aptly publish snapshot -components ... -architectures ... -distribution
+    # ... -origin Ubuntu trusty-stable ubuntu/stable
+
+    publish_cmds = {
+        "create": publish_cmd_create,
+        "update": publish_cmd_update,
+    }
+
+    cmd_publish = publish_cmds[args.task]
+
+    if args.publish_name == "all":
+        commands = [
+            cmd_publish(cfg, publish_name, publish_conf_entry)
+            for publish_name, publish_conf in cfg["publish"].items()
+            for publish_conf_entry in publish_conf
+            if publish_conf_entry.get("automatic-update", "false") is True
+        ]
+
+        for cmd in Command.order_commands(commands, state.has_dependency):
+            cmd.execute()
+
+    else:
+        if args.publish_name in cfg["publish"]:
+            commands = [
+                cmd_publish(cfg, args.publish_name, publish_conf_entry)
+                for publish_conf_entry in cfg["publish"][args.publish_name]
+            ]
+            for cmd in Command.order_commands(commands, state.has_dependency):
+                cmd.execute()
+        else:
+            raise ValueError(
+                "Requested publish is not defined in config file: %s"
+                % (args.publish_name)
+            )
+
+
+def snapshot(cfg, args):
+    """Creates snapshot commands, orders and executes them.
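+
+    A sketch of direct use (the namespace mirrors what :func:`main` builds;
+    values are hypothetical):
+
+    >>> import argparse
+    >>> ns = argparse.Namespace(task="create", snapshot_name="all", debug=False)
+    >>> snapshot(cfg, ns)  # doctest: +SKIP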
+
+    :param cfg: The configuration yml as dict
+    :type cfg: dict
+    :param args: The command-line arguments read with :py:mod:`argparse`
+    :type args: namespace"""
+    lg.debug("Snapshots to create: %s", cfg["snapshot"].keys())
+
+    snapshot_cmds = {
+        "create": cmd_snapshot_create,
+        "update": cmd_snapshot_update,
+    }
+
+    cmd_snapshot = snapshot_cmds[args.task]
+
+    if args.snapshot_name == "all":
+        commands = [
+            cmd
+            for snapshot_name, snapshot_config in cfg["snapshot"].items()
+            for cmd in cmd_snapshot(cfg, snapshot_name, snapshot_config)
+        ]
+
+        if args.debug:  # pragma: no cover
+            dot_file = "/tmp/commands.dot"
+            with codecs.open(dot_file, "w", "UTF-8") as fh_dot:
+                fh_dot.write(Command.command_list_to_digraph(commands))
+            lg.info("Wrote command dependency tree graph to %s", dot_file)
+
+        if len(commands) > 0:
+            for cmd in Command.order_commands(commands, state.has_dependency):
+                cmd.execute()
+
+    else:
+        if args.snapshot_name in cfg["snapshot"]:
+            commands = cmd_snapshot(
+                cfg, args.snapshot_name, cfg["snapshot"][args.snapshot_name]
+            )
+
+            if len(commands) > 0:
+                for cmd in Command.order_commands(commands, state.has_dependency):
+                    cmd.execute()
+
+        else:
+            raise ValueError(
+                "Requested snapshot is not defined in config file: %s"
+                % (args.snapshot_name)
+            )
+
+
+def format_timestamp(timestamp):
+    """Wrapper for strftime, to ensure we're all using the same format.
+
+    :param timestamp: The timestamp to format
+    :type timestamp: :py:class:`datetime.datetime`"""
+    return timestamp.strftime("%Y%m%dT%H%MZ")
+
+
+back_reference_map = {
+    "current": 0,
+    "previous": 1,
+}
+
+
+def snapshot_spec_to_name(cfg, snapshot):
+    """Converts a given snapshot short spec to a name.
+
+    A short spec is a value that may either be a string or a dict.
+
+    If it's a string, everything is fine and we just use that as
+    a snapshot name.
+
+    However if it's a dict, we assume it has the following keys:
+
+    * name: template for the snapshot
+    * timestamp: information on how to generate the timestamp.
+
+    For further information regarding the timestamp's data structure,
+    consult the documentation of expand_timestamped_name().
+
+    :param cfg: Complete yaml config
+    :type cfg: dict
+    :param snapshot: Config of the snapshot
+    :type snapshot: dict or str
+    """
+    delta = datetime.timedelta(seconds=1)
+    if hasattr(snapshot, "items"):
+        name = snapshot["name"]
+        if "timestamp" not in snapshot:
+            return name
+
+        ts = snapshot["timestamp"]
+        back_ref = back_reference_map.get(ts)
+        if back_ref is None:
+            back_ref = int(ts)
+        reference = cfg["snapshot"][name]
+
+        timestamp = datetime.datetime.now()
+        for _ in range(back_ref + 1):
+            timestamp = round_timestamp(reference["timestamp"], timestamp)
+            timestamp -= delta
+
+        timestamp += delta
+        return name.replace("%T", format_timestamp(timestamp))
+    else:  # pragma: no cover
+        return snapshot
+
+
+def dependents_of_snapshot(snapshot_name):
+    """Yield all dependents of a snapshot from the current state, recursively.
+
+    :rtype: generator"""
+    for dependent in state.snapshot_map.get(snapshot_name, []):
+        yield dependent
+        for sub in dependents_of_snapshot(dependent):  # pragma: no cover
+            yield sub
+
+
+def rotate_snapshot(cfg, snapshot_name):
+    """Creates a command to rotate a snapshot in order to be able to update a
+    current publish.
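+
+    A sketch of the command this produces when "rotate_via" is not
+    configured (snapshot name and timestamp are hypothetical)::
+
+        aptly snapshot rename fakerepo-stable fakerepo-stable-rotated-20121010T1010Z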
+
+    :param cfg: pyaptly config
+    :type cfg: dict
+    :param snapshot_name: the snapshot to rotate
+    :type snapshot_name: str"""
+    rotated_name = cfg["snapshot"][snapshot_name].get(
+        "rotate_via",
+        "%s-rotated-%s" % (snapshot_name, format_timestamp(datetime.datetime.now())),
+    )
+
+    # First, verify that our snapshot environment is in a sane state.
+    # Fixing the environment is not currently our task.
+
+    if rotated_name in state.snapshots:  # pragma: no cover
+        raise Exception(
+            "Cannot update snapshot %s - rotated name %s already exists"
+            % (snapshot_name, rotated_name)
+        )
+
+    cmd = Command(["aptly", "snapshot", "rename", snapshot_name, rotated_name])
+
+    cmd.provide("virtual", rotated_name)
+    return cmd
+
+
+def cmd_snapshot_update(cfg, snapshot_name, snapshot_config):
+    """Create commands to update all rotating snapshots.
+
+    :param cfg: pyaptly config
+    :type cfg: dict
+    :param snapshot_name: Name of the snapshot to update/rotate
+    :type snapshot_name: str
+    :param snapshot_config: Configuration of the snapshot from the yml file.
+    :type snapshot_config: dict"""
+
+    # To update a snapshot, we need to do roughly the following steps:
+    # 1) Rename the current snapshot and all snapshots that depend on it
+    # 2) Create new version of the snapshot and all snapshots that depend on it
+    # 3) Recreate all renamed snapshots
+    # 4) Update / switch-over publishes
+    # 5) Remove the rotated temporary snapshots
+
+    if "%T" in snapshot_name:  # pragma: no cover
+        # Timestamped snapshots are never rotated by design.
+        return []
+
+    affected_snapshots = [snapshot_name]
+    affected_snapshots.extend(list(dependents_of_snapshot(snapshot_name)))
+
+    # TODO: rotated snapshots should be identified by configuration option,
+    # not just by "not being timestamped".
+
+    rename_cmds = [rotate_snapshot(cfg, snap) for snap in affected_snapshots]
+
+    # The "intermediate" command causes the state reader to refresh. At the
+    # same time, it provides a collection point for dependency handling.
+    intermediate = FunctionCommand(state.read)
+    intermediate.provide("virtual", "all-snapshots-rotated")
+
+    for cmd in rename_cmds:
+        # Ensure that our "intermediate" pseudo command comes after all
+        # the rename commands, by ensuring it depends on all their "virtual"
+        # provided items.
+        cmd_vprovides = [
+            provide for ptype, provide in cmd.get_provides() if ptype == "virtual"
+        ]
+        for provide in cmd_vprovides:
+            intermediate.require("virtual", provide)
+
+    # Same as before - create a focal point to "collect" dependencies
+    # after the snapshots have been rebuilt. Also reload state once again.
+    intermediate2 = FunctionCommand(state.read)
+    intermediate2.provide("virtual", "all-snapshots-rebuilt")
+
+    create_cmds = []
+    for _ in affected_snapshots:
+        # Well.. there's normally just one, but since we need interface
+        # consistency, cmd_snapshot_create() returns a list. And since it
+        # returns a list, we may just as well future-proof it and loop instead
+        # of assuming it's going to be a single entry (and fail horribly if
+        # this assumption changes in the future).
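+        # Illustrative ordering enforced by the virtual dependencies below
+        # (snapshot names are hypothetical):
+        #
+        #   rename snap, rename <dependents>       (rename_cmds)
+        #     -> state.read()                      (all-snapshots-rotated)
+        #       -> recreate snap and dependents    (create_cmds)
+        #         -> state.read()                  (all-snapshots-rebuilt)
+        #           -> update affected publishes   (republish_cmds)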
+        for create_cmd in cmd_snapshot_create(
+            cfg, snapshot_name, cfg["snapshot"][snapshot_name], ignore_existing=True
+        ):
+            # enforce cmd to run after the refresh, and thus also
+            # after all the renames
+            create_cmd.require("virtual", "all-snapshots-rotated")
+
+            # Evil hack - we must do the dependencies ourselves, to avoid
+            # getting a circular graph
+            create_cmd._requires = set(
+                [
+                    (type_, req)
+                    for type_, req in create_cmd._requires
+                    if type_ != "snapshot"
+                ]
+            )
+
+            create_cmd.provide("virtual", "readiness-for-%s" % snapshot_name)
+            for follower in dependents_of_snapshot(snapshot_name):
+                create_cmd.require("virtual", "readiness-for-%s" % follower)
+
+            # "Focal point" - make intermediate2 run after all the commands
+            # that re-create the snapshots
+            create_cmd.provide("virtual", "rebuilt-%s" % snapshot_name)
+            intermediate2.require("virtual", "rebuilt-%s" % snapshot_name)
+
+            create_cmds.append(create_cmd)
+
+    # At this point, snapshots have been renamed, then recreated.
+    # After each of the steps, the system state has been re-read.
+    # So now, we're left with updating the publishes.
+
+    def is_publish_affected(name, publish):
+        if "%s %s" % (name, publish["distribution"]) in state.publishes:
+            try:
+                for snap in publish["snapshots"]:
+                    snap_name = snapshot_spec_to_name(cfg, snap)
+                    if snap_name in affected_snapshots:
+                        return True
+            except KeyError:  # pragma: no cover
+                lg.debug(
+                    (
+                        "Publish endpoint %s is not affected because it has no "
+                        "snapshots defined"
+                    )
+                    % name
+                )
+                return False
+        return False
+
+    if "publish" in cfg:
+        all_publish_commands = [
+            publish_cmd_update(
+                cfg, publish_name, publish_conf_entry, ignore_existing=True
+            )
+            for publish_name, publish_conf in cfg["publish"].items()
+            for publish_conf_entry in publish_conf
+            if publish_conf_entry.get("automatic-update", "false") is True
+            if is_publish_affected(publish_name, publish_conf_entry)
+        ]
+    else:
+        all_publish_commands = []
+
+    republish_cmds = [c for c in all_publish_commands if c]
+
+    # Ensure that the republish commands run AFTER the snapshots are rebuilt
+    for cmd in republish_cmds:
+        cmd.require("virtual", "all-snapshots-rebuilt")
+
+    # TODO:
+    # - We need to cleanup all the rotated snapshots after the publishes are
+    #   rebuilt
+    # - Filter publishes, so only the non-timestamped publishes are rebuilt
+
+    return rename_cmds + create_cmds + republish_cmds + [intermediate, intermediate2]
+
+
+def cmd_snapshot_create(cfg, snapshot_name, snapshot_config, ignore_existing=False):
+    """Create a snapshot create command to be ordered and executed later.
+
+    :param cfg: pyaptly config
+    :type cfg: dict
+    :param snapshot_name: Name of the snapshot to create
+    :type snapshot_name: str
+    :param snapshot_config: Configuration of the snapshot from the yml file.
+    :type snapshot_config: dict
+    :param ignore_existing: Optional, defaults to False. If set to True,
+        still return a command object even if the requested snapshot already
+        exists
+    :type ignore_existing: bool
+
+    :rtype: list
+    """
+
+    # TODO: extract possible timestamp component
+    # and generate *actual* snapshot name
+
+    snapshot_name = expand_timestamped_name(snapshot_name, snapshot_config)
+
+    if snapshot_name in state.snapshots and not ignore_existing:
+        return []
+
+    default_aptly_cmd = ["aptly", "snapshot", "create"]
+    default_aptly_cmd.append(snapshot_name)
+    default_aptly_cmd.append("from")
+
+    if "mirror" in snapshot_config:
+        cmd = Command(default_aptly_cmd + ["mirror", snapshot_config["mirror"]])
+        cmd.provide("snapshot", snapshot_name)
+        cmd.require("mirror", snapshot_config["mirror"])
+        return [cmd]
+
+    elif "repo" in snapshot_config:
+        cmd = Command(default_aptly_cmd + ["repo", snapshot_config["repo"]])
+        cmd.provide("snapshot", snapshot_name)
+        cmd.require("repo", snapshot_config["repo"])
+        return [cmd]
+
+    elif "filter" in snapshot_config:
+        cmd = Command(
+            [
+                "aptly",
+                "snapshot",
+                "filter",
+                snapshot_spec_to_name(cfg, snapshot_config["filter"]["source"]),
+                snapshot_name,
+                snapshot_config["filter"]["query"],
+            ]
+        )
+        cmd.provide("snapshot", snapshot_name)
+        cmd.require(
+            "snapshot", snapshot_spec_to_name(cfg, snapshot_config["filter"]["source"])
+        )
+        return [cmd]
+
+    elif "merge" in snapshot_config:
+        cmd = Command(
+            [
+                "aptly",
+                "snapshot",
+                "merge",
+                snapshot_name,
+            ]
+        )
+        cmd.provide("snapshot", snapshot_name)
+
+        for source in snapshot_config["merge"]:
+            source_name = snapshot_spec_to_name(cfg, source)
+            cmd.append(source_name)
+            cmd.require("snapshot", source_name)
+
+        return [cmd]
+
+    else:  # pragma: no cover
+        raise ValueError(
+            "Don't know how to handle snapshot config %s" % (snapshot_config,)
+        )
+
+
+def mirror(cfg, args):
+    """Creates mirror commands and executes them immediately; mirror commands
+    are not ordered through the dependency graph.
+
+    :param cfg: The configuration yml as dict
+    :type cfg: dict
+    :param args: The command-line arguments read with :py:mod:`argparse`
+    :type args: namespace"""
+    lg.debug("Mirrors to create: %s", cfg["mirror"])
+
+    mirror_cmds = {
+        "create": cmd_mirror_create,
+        "update": cmd_mirror_update,
+    }
+
+    cmd_mirror = mirror_cmds[args.task]
+
+    if args.mirror_name == "all":
+        for mirror_name, mirror_config in cfg["mirror"].items():
+            cmd_mirror(cfg, mirror_name, mirror_config)
+    else:
+        if args.mirror_name in cfg["mirror"]:
+            cmd_mirror(cfg, args.mirror_name, cfg["mirror"][args.mirror_name])
+        else:
+            raise ValueError(
+                "Requested mirror is not defined in config file: %s"
+                % (args.mirror_name)
+            )
+
+
+def add_gpg_keys(mirror_config):
+    """Uses the gpg command-line to download and add gpg keys needed to create
+    mirrors.
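+
+    An illustrative mirror_config fragment (key ID and URL are hypothetical):
+
+    >>> mirror_config = {
+    ...     "gpg-keys": ["EC54D33E5B5EBE98"],
+    ...     "gpg-urls": ["https://example.com/archive-key.asc"],
+    ... }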
+
+    :param mirror_config: The mirror configuration from the yml file
+    :type mirror_config: dict
+    """
+    keys_urls = {}
+    if "gpg-keys" in mirror_config:
+        keys = unit_or_list_to_list(mirror_config["gpg-keys"])
+        if "gpg-urls" in mirror_config:
+            urls = unit_or_list_to_list(mirror_config["gpg-urls"])
+            urls_len = len(urls)
+            for x in range(len(keys)):
+                if x < urls_len:
+                    url = urls[x]
+                else:  # pragma: no cover
+                    url = None
+                keys_urls[keys[x]] = url
+        else:
+            for key in keys:
+                keys_urls[key] = None
+
+    for key in keys_urls.keys():
+        if key in state.gpg_keys:
+            continue
+        try:
+            key_command = [
+                "gpg",
+                "--no-default-keyring",
+                "--keyring",
+                "trustedkeys.gpg",
+                "--keyserver",
+                "hkp://127.0.0.1:8080",
+                "--recv-keys",
+                key,
+            ]
+            lg.debug("Adding gpg key with call: %s", key_command)
+            subprocess.check_call(key_command)
+        except subprocess.CalledProcessError:  # pragma: no cover
+            url = keys_urls[key]
+            if url:
+                key_command = (
+                    "curl %s | "
+                    "gpg --no-default-keyring --keyring trustedkeys.gpg "
+                    "--import"
+                ) % url
+                subprocess.check_call(["bash", "-c", key_command])
+            else:
+                raise
+    state.read_gpg()
+
+
+def cmd_mirror_create(cfg, mirror_name, mirror_config):
+    """Create a mirror; this runs immediately and is not ordered through the
+    dependency graph.
+
+    :param cfg: The configuration yml as dict
+    :type cfg: dict
+    :param mirror_name: Name of the mirror to create
+    :type mirror_name: str
+    :param mirror_config: Configuration of the mirror from the yml file.
+    :type mirror_config: dict"""
+
+    if mirror_name in state.mirrors:  # pragma: no cover
+        return
+
+    add_gpg_keys(mirror_config)
+    aptly_cmd = ["aptly", "mirror", "create"]
+
+    if "sources" in mirror_config and mirror_config["sources"]:
+        aptly_cmd.append("-with-sources")
+    else:
+        aptly_cmd.append("-with-sources=false")
+
+    if "udeb" in mirror_config and mirror_config["udeb"]:
+        aptly_cmd.append("-with-udebs")
+
+    if "architectures" in mirror_config:
+        aptly_cmd.append(
+            "-architectures={0}".format(
+                ",".join(unit_or_list_to_list(mirror_config["architectures"]))
+            )
+        )
+
+    aptly_cmd.append(mirror_name)
+    aptly_cmd.append(mirror_config["archive"])
+    aptly_cmd.append(mirror_config["distribution"])
+    aptly_cmd.extend(unit_or_list_to_list(mirror_config["components"]))
+
+    lg.debug("Running command: %s", " ".join(aptly_cmd))
+    subprocess.check_call(aptly_cmd)
+
+
+def cmd_mirror_update(cfg, mirror_name, mirror_config):
+    """Update a mirror; this runs immediately and is not ordered through the
+    dependency graph.
+
+    :param cfg: pyaptly config
+    :type cfg: dict
+    :param mirror_name: Name of the mirror to update
+    :type mirror_name: str
+    :param mirror_config: Configuration of the mirror from the yml file.
+ :type mirror_config: dict""" + if mirror_name not in state.mirrors: # pragma: no cover + raise Exception("Mirror not created yet") + add_gpg_keys(mirror_config) + aptly_cmd = ["aptly", "mirror", "update"] + if "max-tries" in mirror_config: + aptly_cmd.append("-max-tries=%d" % mirror_config["max-tries"]) + + aptly_cmd.append(mirror_name) + lg.debug("Running command: %s", " ".join(aptly_cmd)) + subprocess.check_call(aptly_cmd) + + +if __name__ == "__main__": # pragma: no cover + main() diff --git a/pyaptly/test.py b/pyaptly/test.py index 409980c..7556c16 100644 --- a/pyaptly/test.py +++ b/pyaptly/test.py @@ -1,3 +1,6 @@ +# type: ignore # TODO +# flake8: noqa # TODO + """Tools for testing pyaptly""" import codecs @@ -15,7 +18,7 @@ import six import yaml -import pyaptly +import pyaptly.legacy as pyaptly aptly_conf = Path.home().absolute() / ".aptly.conf" @@ -129,7 +132,7 @@ def clean_and_config(test_input, freeze="2012-10-10 10:10:10", sign=False): aptly = tempdir / "aptly" aptly.mkdir(parents=True) config = {"rootDir": str(aptly)} - if aptly_conf.exists(): + if aptly_conf.exists(): # pragma: no cover aptly_conf.unlink() with aptly_conf.open("w") as f: json.dump(config, f) diff --git a/pyaptly/test_test.py b/pyaptly/test_test.py index 32b591c..fd4a9b4 100644 --- a/pyaptly/test_test.py +++ b/pyaptly/test_test.py @@ -1,3 +1,6 @@ +# type: ignore # TODO +# flake8: noqa # TODO + """Testing the testing tools""" import os @@ -14,56 +17,43 @@ if sys.version_info < (2, 7): # pragma: no cover import mock + given = mock.MagicMock() # noqa example = mock.MagicMock() # noqa st = mock.MagicMock() # noqa -_test_base = os.path.dirname( - os.path.abspath(__file__) -).encode("UTF-8") +_test_base = os.path.dirname(os.path.abspath(__file__)).encode("UTF-8") yml_st = st.recursive( - st.floats(-1, 1) | st.booleans() | - st.text() | st.none() | st.binary(), - lambda children: st.lists( - children, max_size=10 - ) | st.dictionaries( - st.text(), - children, - max_size=10 - ), - max_leaves=30 + st.floats(-1, 1) | st.booleans() | st.text() | st.none() | st.binary(), + lambda children: st.lists(children, max_size=10) + | st.dictionaries(st.text(), children, max_size=10), + max_leaves=30, ) class TestTest(unittest.TestCase): def test_read_yml(self): """Test if reading yml files works without errors.""" - path = os.path.join( - _test_base, - b"merge.yml" - ) + path = os.path.join(_test_base, b"merge.yml") yml = test.read_yml(path) - assert yml['mirror']['fakerepo01'] is not None + assert yml["mirror"]["fakerepo01"] is not None def test_delete(self): """Test if merges can delete fields""" - path = os.path.join( - _test_base, - b"delete_merge.yml" - ) + path = os.path.join(_test_base, b"delete_merge.yml") yml = test.read_yml(path) - assert 'fakerepo01' not in yml['mirror'] + assert "fakerepo01" not in yml["mirror"] @test.hypothesis_min_ver @given(yml_st, yml_st, st.random_module()) - @example({'1': 'Huhu'}, {'1': 'None'}, st.random_module()) + @example({"1": "Huhu"}, {"1": "None"}, st.random_module()) def test_merge(self, a, b, rand): # pragma: no cover """Test if merge has the expected result.""" - res = test.merge(a, b) + res = test.merge(a, b) for _ in range(10): path, data_b = self.rand_path(b) - if data_b == 'None': + if data_b == "None": error = False try: data_res = self.get_path(path, res) @@ -76,7 +66,7 @@ def test_merge(self, a, b, rand): # pragma: no cover if isinstance(a, dict) and isinstance(b, dict): path, data_a = self.rand_path(a) try: - data_res = self.get_path(path, res) + data_res = self.get_path(path, 
res)
                if data_a != data_res:  # pragma: no cover
                    data_b = self.get_path(path, b)
                    assert data_res == data_b
diff --git a/pyaptly/tests/__init__.py b/pyaptly/tests/__init__.py
new file mode 100644
index 0000000..7509213
--- /dev/null
+++ b/pyaptly/tests/__init__.py
@@ -0,0 +1 @@
+"""The tests module, containing the tests."""
diff --git a/pyaptly/tests/bad-unicode.bin b/pyaptly/tests/bad-unicode.bin
new file mode 100644
index 0000000..50f75b1
--- /dev/null
+++ b/pyaptly/tests/bad-unicode.bin
@@ -0,0 +1 @@
+he˙llo
\ No newline at end of file
diff --git a/pyaptly/tests/test_mirror.py b/pyaptly/tests/test_mirror.py
index 0f1bedd..a4479c2 100644
--- a/pyaptly/tests/test_mirror.py
+++ b/pyaptly/tests/test_mirror.py
@@ -1,3 +1,4 @@
+"""Test mirror functionality."""
 import logging
 
 import pytest
@@ -13,7 +14,6 @@ def test_mirror_create(environment, config, caplog):
     caplog.set_level(logging.DEBUG)
     pyaptly.main(["-c", config_file, "mirror", "create"])
 
-
     keys_added = []
     for rec in caplog.records:
         for arg in rec.args:
@@ -31,6 +31,7 @@
 
 @pytest.mark.parametrize("config", ["mirror-basic.toml"], indirect=True)
 def test_mirror_update(environment, config):
+    """Test if updating mirrors works."""
     config_file, config_dict = config
     do_mirror_update(config_file)
diff --git a/pyaptly/tests/test_util.py b/pyaptly/tests/test_util.py
new file mode 100644
index 0000000..55e71e7
--- /dev/null
+++ b/pyaptly/tests/test_util.py
@@ -0,0 +1,31 @@
+"""Test the util.py module."""
+import pytest
+
+from .. import util
+
+EXPECT = """
+stdout: 'first
+ second'
+""".strip()
+
+
+@pytest.mark.parametrize("decode", [True, False])
+@pytest.mark.parametrize("unicode_error", [True, False])
+def test_run(test_path, debug_mode, caplog, decode, unicode_error):
+    """Testing the instrumented run function."""
+    if unicode_error:
+        if decode:
+            with pytest.raises(UnicodeDecodeError):
+                util.run(["/bin/cat", test_path / "bad-unicode.bin"], decode=decode)
+        else:
+            util.run(["/bin/cat", test_path / "bad-unicode.bin"], decode=decode)
+            assert "stdout: 'b'he\\xffllo''" in caplog.messages[0]
+    else:
+        util.run(["sh", "-c", "printf hello"], decode=decode)
+        caplog.clear()
+        util.run(["sh", "-c", "printf error 1>&2; false"], decode=decode)
+        assert "stderr: 'error'" in caplog.messages[0]
+        assert "returncode: 1" in caplog.messages[0]
+        caplog.clear()
+        util.run(["sh", "-c", "printf 'first\nsecond'"], decode=decode)
+        assert EXPECT in caplog.messages[0]
diff --git a/pyaptly/util.py b/pyaptly/util.py
index 5109277..fa23133 100644
--- a/pyaptly/util.py
+++ b/pyaptly/util.py
@@ -1,6 +1,9 @@
+"""Basic functions like running processes and logging."""
+
 import logging
 import subprocess
-from subprocess import DEVNULL, PIPE
+from subprocess import DEVNULL, PIPE  # noqa: F401
+from typing import Union
 
 _DEBUG = False
 _PYTEST_DEBUG = False
@@ -22,8 +25,8 @@ def is_debug_mode():
     return _DEBUG or _PYTEST_DEBUG
 
 
-def run(cmd_args, *, decode=True, **kwargs):
-    """Instrumented subprocess.run() for easier debugging.
+def run(cmd_args: list[str], *, decode: bool = True, **kwargs):
+    """Instrumented subprocess.run for easier debugging.
 
     By default this run command will add `encoding="UTF-8"` to kwargs. Disable
     with `decode=False`.
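+
+    A minimal sketch of intended use (command is illustrative):
+
+    >>> result = run(["printf", "hi"], stdout=DEVNULL)
+    >>> result.returncode
+    0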
@@ -39,7 +42,7 @@ def run(cmd_args, *, decode=True, **kwargs):
         kwargs["stderr"] = PIPE
         added_stderr = True
     result = None
-    if decode:
+    if decode and "encoding" not in kwargs:
         kwargs["encoding"] = "UTF-8"
     try:
         result = subprocess.run(cmd_args, **kwargs)
@@ -54,7 +57,12 @@ def run(cmd_args, *, decode=True, **kwargs):
     return result
 
 
-def indent_out(output):
+def indent_out(output: Union[bytes, str]) -> str:
+    """Indent command output for nicer logging-messages.
+
+    It will convert bytes to strings if needed, or display the result as
+    bytes if decoding fails.
+    """
     output = output.strip()
     if not output:
         return ""
@@ -78,7 +86,8 @@ def indent_out(output):
     return "\n".join(result)
 
 
-def log_run_result(result):
+def log_run_result(result: subprocess.CompletedProcess):
+    """Log a CompletedProcess result at debug level."""
     msg = RESULT_LOG.format(
         args=result.args,
        returncode=result.returncode,
@@ -88,7 +97,7 @@ def log_run_result(result):
     logger.debug(msg)
 
 
-def parse_aptly_show_command(show):
+def parse_aptly_show_command(show: str) -> dict[str, str]:
     """Parse an aptly show command."""
     result = {}
     for line in show.split("\n"):
diff --git a/pyaptly/version.py b/pyaptly/version.py
index 1db2ce3..2293fb3 100644
--- a/pyaptly/version.py
+++ b/pyaptly/version.py
@@ -1,2 +1,5 @@
+# type: ignore # TODO
+# flake8: noqa # TODO
+
 """Version module to be read from various places"""
 __version__ = "1.2.0"  # pragma: no cover
diff --git a/pyproject.toml b/pyproject.toml
index 2a9ed7a..3fb59e2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,9 +6,42 @@ authors = ["Jean-Louis Fuchs "]
 license = "AGPL-3.0-or-later"
 readme = "README.md"
 
+[tool.mypy]
+check_untyped_defs = true
+
+[tool.isort]
+profile = "black"
+
 [tool.poetry.scripts]
 pyaptly = 'pyaptly.cli:cli'
 
+[tool.flake8]
+ignore = [
+    # whitespace before ':'
+    "E203",
+    # too many leading ### in a block comment
+    "E266",
+    # expected 2 blank lines, found 1 (flake8 and black disagree on what to do
+    # with multiple comment blocks at top level)
+    "E302",
+    # line too long (managed by black)
+    "E501",
+    # Line break occurred before a binary operator (this is not PEP8 compatible)
+    "W503",
+]
+
+max-line-length = 88
+
+[tool.coverage.report]
+fail_under = 100
+exclude_lines = [
+    "pragma: no cover",
+    "pragma: todo cover",
+    "@pytest.mark.skip",
+]
+omit = []
+show_missing = true
+
 [tool.poetry.dependencies]
 python = "^3.11"
 pretty-dump = {git = "https://github.com/adfinis/freeze"}
@@ -28,16 +61,23 @@ mypy = "^1.7.1"
 pdbpp = "^0.10.3"
 black = "^23.11.0"
 isort = "^5.12.0"
-flake8 = "^6.1.0"
+
 python-lsp-server = "^1.9.0"
 python-lsp-black = "^1.3.0"
+python-lsp-isort = "^0.1"
+
+flake8 = "^6.1.0"
 flake8-bugbear = "^23.12.2"
 flake8-debugger = "^4.1.2"
 flake8-isort = "^6.1.1"
 flake8-docstrings = "^1.7.0"
 flake8-string-format = "^0.3.0"
 flake8-tuple = "^0.4.1"
-python-lsp-isort = "^0.1"
+flake8-pyproject = "^1.2.3"
+
+types-toml = "^0.10.8.7"
+types-pyyaml = "^6.0.12.12"
+pytest-coverage = "^0.0"
 
 [build-system]
 requires = ["poetry-core"]