diff --git a/TODO b/TODO
index 9014fba..20f001f 100644
--- a/TODO
+++ b/TODO
@@ -31,3 +31,14 @@ The reason for this is to ensure the continuity of the git history.
 # Do all output via logging module
 
 - Idea show INFO and above, but display info without timestamp etc.
+
+# latest, maybe repeat:
+
+- replace all prints with logging
+- correctly do getLogger
+- is this unit_or_list... thing still needed
+- remove codecs.open
+- use pathlib.Path
+- replace state_reader.state with function
+- remove six
+- remove test.py completely
diff --git a/poetry.lock b/poetry.lock
index 3ec420a..1a24629 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
 
 [[package]]
 name = "attrs"
@@ -90,63 +90,63 @@ files = [
 
 [[package]]
 name = "coverage"
-version = "7.4.0"
+version = "7.4.3"
 description = "Code coverage measurement for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"},
-    {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"},
-    {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"},
-    {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"},
-    {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"},
-    {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"},
-    {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"},
-    {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"},
-    {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"},
-    {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"},
-    {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"},
-    {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"},
-    {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"},
-    {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"},
-    {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"},
-    {file = 
"coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"}, + {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"}, + {file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = 
"sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"}, + {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, + {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, + {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, + {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, + {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, + {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, + {file = 
"coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, + {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, + {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, + {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, + {file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, + {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, + {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, + {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, + {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, ] [package.extras] @@ -154,13 +154,13 @@ toml = ["tomli"] [[package]] name = "docstring-to-markdown" -version = "0.13" +version = "0.15" description = "On the fly conversion of Python docstrings to markdown" optional = false python-versions = ">=3.6" files = [ - {file = "docstring-to-markdown-0.13.tar.gz", hash = 
"sha256:3025c428638ececae920d6d26054546a20335af3504a145327e657e7ad7ce1ce"}, - {file = "docstring_to_markdown-0.13-py3-none-any.whl", hash = "sha256:aa487059d0883e70e54da25c7b230e918d9e4d40f23d6dfaa2b73e4225b2d7dd"}, + {file = "docstring-to-markdown-0.15.tar.gz", hash = "sha256:e146114d9c50c181b1d25505054a8d0f7a476837f0da2c19f07e06eaed52b73d"}, + {file = "docstring_to_markdown-0.15-py3-none-any.whl", hash = "sha256:27afb3faedba81e34c33521c32bbd258d7fbb79eedf7d29bc4e81080e854aec0"}, ] [[package]] @@ -319,15 +319,60 @@ files = [ [package.dependencies] python-dateutil = ">=2.7" +[[package]] +name = "frozendict" +version = "2.4.0" +description = "A simple immutable dictionary" +optional = false +python-versions = ">=3.6" +files = [ + {file = "frozendict-2.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:475c65202a6f5421df8cacb8a2f29c5087134a0542b0540ae95fbf4db7af2ff9"}, + {file = "frozendict-2.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2607e82efdd2c277224a58bda3994d4cd48e49eff7fa31e404cf3066e8dbfeae"}, + {file = "frozendict-2.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fd4583194baabe100c135883017da76259a315d34e303eddf198541b7e02e44"}, + {file = "frozendict-2.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efca7281184b54f7abab6980cf25837b709f72ced62791f62dabcd7b184d958a"}, + {file = "frozendict-2.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fc4cba1ced988ce9020dfcaae6fe3f5521eebc00c5772b511aaf691b0be91e6"}, + {file = "frozendict-2.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8fab616e7c0fea2ac928f107c740bd9ba516fc083adfcd1c391d6bfc9164403d"}, + {file = "frozendict-2.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:09ba8ee37d260adde311b8eb4cd12bf27f64071242f736757ae6a11d331eb860"}, + {file = "frozendict-2.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:0615ed71570eec3cc96df063930ea6e563211efeeac86e3f3cc8bdfc9c9bfab7"}, + {file = "frozendict-2.4.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc754117a7d60ba8e55b3c39abd67f37fbc05dd63cdcb03d1717a382fe0a3421"}, + {file = "frozendict-2.4.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2804ea4bd2179bb33b99483cc8d69246630cc00632b9affe2914e8666f1cc7e5"}, + {file = "frozendict-2.4.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd4700c3f0aebdc8f4375c35590135794b1dbf2aca132f4756b584fa9910af2d"}, + {file = "frozendict-2.4.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:da4406d95c340e0b1cc43a3858fac729f52689325bcf61a9182eb94aff7451dc"}, + {file = "frozendict-2.4.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1875e7b70a5724bf964354da8fd542240d2cead0d80053ac96bf4494ce3517fa"}, + {file = "frozendict-2.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a60f353496637ca21396289a7d969af1eb4ec4d11a7c37a0e7f25fc1761a0c97"}, + {file = "frozendict-2.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b666f9c6c8a9e794d2713a944b10a65480ff459579d75b5f686c75031c2c2dfc"}, + {file = "frozendict-2.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d81fb396ea81fcba3b3dde4a4b51adcb74ff31632014fbfd030f8acd5a7292"}, + {file = "frozendict-2.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4925c8e82d2bd23d45996cd0827668a52b9c51103897c98ce409a763d0c00c61"}, + {file = "frozendict-2.4.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aa86325da6a6071284b4ed3d9d2cd9db068560aebad503b658d6a889a0575683"}, + {file = 
"frozendict-2.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5bb5b62d4e2bce12e91800496d94de41bec8f16e4d8a7b16e8f263676ae2031a"}, + {file = "frozendict-2.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3909df909516cfd7bcefd9a3003948970a12a50c5648d8bbddafcef171f2117f"}, + {file = "frozendict-2.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:204f2c5c10fc018d1ba8ccc67758aa83fe769c782547bd26dc250317a7ccba71"}, + {file = "frozendict-2.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8d1d269874c94b1ed2b6667e5e43dcf4541838019b1caa4c48f848ac73634df"}, + {file = "frozendict-2.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:809f1cffb602cf06e5186c69c0e3b74bec7a3684593145331f9aa2a65b5ba3b7"}, + {file = "frozendict-2.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b017cba5f73869b04c2977139ad08e57a7480de1e384c34193939698119baa1d"}, + {file = "frozendict-2.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0b75e5e231621dedaef88334997e79fbd137dd89895543d3862fe0220fc3572c"}, + {file = "frozendict-2.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:df3819a5d48ab3aae1548e62093d0111ad7c3b62ff9392421b7bbf149c08b629"}, + {file = "frozendict-2.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:42a9b33ccf9d417b22146e59803c53d5c39d7d9151d2df8df59c235f6a1a5ed7"}, + {file = "frozendict-2.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3f51bfa64e0c4a6608e3f2878bab1211a6b3b197de6fa57151bbe73f1184457"}, + {file = "frozendict-2.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a1d232f092dc686e6ef23d436bde30f82c018f31cef1b89b31caef03814b1617"}, + {file = "frozendict-2.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e530658134e88607ff8c2c8934a07b2bb5e9fffab5045f127746f6542c6c77e"}, + {file = "frozendict-2.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23a52bbea30c9e35b89291273944393770fb031e522a172e3aff19b62cc50047"}, + {file = "frozendict-2.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f91acaff475d0ef0d3436b805c9b91fc627a6a8a281771a24f7ab7f458a0b34f"}, + {file = "frozendict-2.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:08d9c7c1aa92b94538b3a79c43999f999012e174588435f197794d5e5a80e0f5"}, + {file = "frozendict-2.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:05c5a77957ecba4286c7ab33861a8f4f2badc7ea86fc82b834fb360d3aa4c108"}, + {file = "frozendict-2.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:c8af8a6a39e0050d3f3193cda56c42b43534a9b3995c44241bb9527e3c3fd451"}, + {file = "frozendict-2.4.0.tar.gz", hash = "sha256:c26758198e403337933a92b01f417a8240c954f553e1d4b5e0f8e39d9c8e3f0a"}, +] + [[package]] name = "hypothesis" -version = "6.93.1" +version = "6.98.15" description = "A library for property-based testing" optional = false python-versions = ">=3.8" files = [ - {file = "hypothesis-6.93.1-py3-none-any.whl", hash = "sha256:f2c32911c5ebde7097ac8715670daed0b6fb3431501c2b80cf39ba6bbc66cd7c"}, - {file = "hypothesis-6.93.1.tar.gz", hash = "sha256:637dc3cfb6ba7ac65b4599013f7b364dd99eeacd78e472c196e806ceada7519f"}, + {file = "hypothesis-6.98.15-py3-none-any.whl", hash = "sha256:5b40fd81fce9e0b35f0a47e10eb41f375a6b9e8551d0e1084c83b8b0d0d1bb6b"}, + {file = "hypothesis-6.98.15.tar.gz", hash = "sha256:1e31210951511b24ce8b3b6e04d791c466385a30ac3af571bf2223954b025d77"}, ] [package.dependencies] @@ -335,7 +380,7 @@ attrs = ">=22.2.0" sortedcontainers = ">=2.1.0,<3.0.0" [package.extras] -all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", 
"django (>=3.2)", "dpcontracts (>=0.4)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.17.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2023.4)"] +all = ["backports.zoneinfo (>=0.2.1)", "black (>=19.10b0)", "click (>=7.0)", "django (>=3.2)", "dpcontracts (>=0.4)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.17.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2024.1)"] cli = ["black (>=19.10b0)", "click (>=7.0)", "rich (>=9.0.0)"] codemods = ["libcst (>=0.3.16)"] dateutil = ["python-dateutil (>=1.4)"] @@ -348,7 +393,7 @@ pandas = ["pandas (>=1.1)"] pytest = ["pytest (>=4.6)"] pytz = ["pytz (>=2014.1)"] redis = ["redis (>=3.0.0)"] -zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2023.4)"] +zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2024.1)"] [[package]] name = "iniconfig" @@ -537,52 +582,34 @@ testing = ["funcsigs", "pytest"] [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "pretty_dump" -version = "3.0" -description = "Diff and dump anything" -optional = false -python-versions = "*" -files = [] -develop = false - -[package.dependencies] -six = "*" - -[package.source] -type = "git" -url = "https://github.com/adfinis/freeze" -reference = "HEAD" -resolved_reference = "a5bd2bdfc68d46df01695079886b3818477f3137" - [[package]] name = "pycodestyle" version = "2.11.1" @@ -725,13 +752,13 @@ pytest-cover = "*" [[package]] name = "python-dateutil" -version = "2.8.2" +version = 
"2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -791,13 +818,13 @@ test = ["coverage", "pycodestyle", "pyflakes", "pylint", "pytest", "pytest-cov"] [[package]] name = "python-lsp-server" -version = "1.9.0" +version = "1.10.0" description = "Python Language Server for the Language Server Protocol" optional = false python-versions = ">=3.8" files = [ - {file = "python-lsp-server-1.9.0.tar.gz", hash = "sha256:dc0c8298f0222fd66a52aa3170f3a5c8fe3021007a02098bb72f7fd8df353d13"}, - {file = "python_lsp_server-1.9.0-py3-none-any.whl", hash = "sha256:6b947cf9dc33d7bed9abc936bb173140fcf606b6eb50cf02e27d4cb09f10d3fb"}, + {file = "python-lsp-server-1.10.0.tar.gz", hash = "sha256:0c9a52dcc16cd0562404d529d50a03372db1ea6fb8dfcc3792b3265441c814f4"}, + {file = "python_lsp_server-1.10.0-py3-none-any.whl", hash = "sha256:1a9f338bd7cf3cdde5ae85a2bd93fd5be9e55249f6482d88f99fb6227215424a"}, ] [package.dependencies] @@ -808,30 +835,19 @@ python-lsp-jsonrpc = ">=1.1.0,<2.0.0" ujson = ">=3.0.0" [package.extras] -all = ["autopep8 (>=2.0.4,<2.1.0)", "flake8 (>=6.1.0,<7)", "mccabe (>=0.7.0,<0.8.0)", "pycodestyle (>=2.11.0,<2.12.0)", "pydocstyle (>=6.3.0,<6.4.0)", "pyflakes (>=3.1.0,<3.2.0)", "pylint (>=2.5.0,<3.1)", "rope (>1.2.0)", "whatthepatch (>=1.0.2,<2.0.0)", "yapf (>=0.33.0)"] -autopep8 = ["autopep8 (>=1.6.0,<2.1.0)"] -flake8 = ["flake8 (>=6.1.0,<7)"] +all = ["autopep8 (>=2.0.4,<2.1.0)", "flake8 (>=7,<8)", "mccabe (>=0.7.0,<0.8.0)", "pycodestyle (>=2.11.0,<2.12.0)", "pydocstyle (>=6.3.0,<6.4.0)", "pyflakes (>=3.2.0,<3.3.0)", "pylint (>=2.5.0,<3.1)", "rope (>=1.11.0)", "whatthepatch (>=1.0.2,<2.0.0)", "yapf (>=0.33.0)"] +autopep8 = ["autopep8 (>=2.0.4,<2.1.0)"] +flake8 = ["flake8 (>=7,<8)"] mccabe = ["mccabe (>=0.7.0,<0.8.0)"] pycodestyle = ["pycodestyle (>=2.11.0,<2.12.0)"] pydocstyle = ["pydocstyle (>=6.3.0,<6.4.0)"] -pyflakes = ["pyflakes (>=3.1.0,<3.2.0)"] +pyflakes = ["pyflakes (>=3.2.0,<3.3.0)"] pylint = ["pylint (>=2.5.0,<3.1)"] -rope = ["rope (>1.2.0)"] +rope = ["rope (>=1.11.0)"] test = ["coverage", "flaky", "matplotlib", "numpy", "pandas", "pylint (>=2.5.0,<3.1)", "pyqt5", "pytest", "pytest-cov"] websockets = ["websockets (>=10.3)"] yapf = ["whatthepatch (>=1.0.2,<2.0.0)", "yapf (>=0.33.0)"] -[[package]] -name = "pytz" -version = "2023.3.post1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, -] - [[package]] name = "pyyaml" version = "6.0.1" @@ -857,6 +873,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = 
"PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -986,13 +1003,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -1089,4 +1106,4 @@ test = ["pytest"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "7efdace4dddfa1c7907356782c43219c62a4f7f949922a582c430505a6098b67" +content-hash = "55048bfcca1dcecc30429ae3fbb3f493a366d88537e1fa8f07cd4ff74a6920fa" diff --git a/pyaptly/__init__.py b/pyaptly/__init__.py index 27b5644..51c7a86 100644 --- a/pyaptly/__init__.py +++ b/pyaptly/__init__.py @@ -3,8 +3,19 @@ Configuration is based on toml input files. """ -from pyaptly.legacy import ( # type: ignore # TODO # noqa: F401 - Command, - SystemStateReader, - main, -) +import os + + +def init_hypothesis(): + """Initialize hypothesis profile if hypothesis is available""" + try: # pragma: no cover + if "HYPOTHESIS_PROFILE" in os.environ: + from hypothesis import settings + + settings.register_profile("ci", settings(max_examples=10000)) + settings.load_profile(os.getenv("HYPOTHESIS_PROFILE", "default")) + except (ImportError, AttributeError): # pragma: no cover + pass + + +init_hypothesis() diff --git a/pyaptly/cli.py b/pyaptly/cli.py index caa3a61..73a27fd 100644 --- a/pyaptly/cli.py +++ b/pyaptly/cli.py @@ -25,9 +25,9 @@ def cli(debug: bool): @cli.command(help="run legacy command parser") def legacy(): """Run legacy pyaptly cli.""" - from pyaptly import main # type: ignore # TODO + from . import main - main() + main.main() @cli.command(help="convert yaml- to toml-comfig") @@ -60,7 +60,7 @@ def legacy(): ) def yaml_to_toml(yaml_path: Path, toml_path: Path, add_defaults: bool): """Convert pyaptly config files from yaml to toml.""" - from pyaptly import config_file + from . 
 
     config_file.yaml_to_toml(
         yaml_path,
diff --git a/pyaptly/command.py b/pyaptly/command.py
new file mode 100644
index 0000000..1c2027e
--- /dev/null
+++ b/pyaptly/command.py
@@ -0,0 +1,356 @@
+import collections
+import logging
+import subprocess
+
+from frozendict import frozendict
+
+from . import state_reader, types
+
+lg = logging.getLogger(__name__)
+
+
+class Command(object):
+    """Represents a system command and is used to resolve dependencies between
+    such commands.
+
+    :param cmd: The command as list, one item per argument
+    :type cmd: list
+    """
+
+    pretend_mode = False
+
+    def __init__(self, cmd: list[str]):
+        assert isinstance(cmd, list)
+        self.cmd: list[str] = cmd
+        self._requires: set[tuple[str, str]] = set()
+        self._provides: set[tuple[str, str]] = set()
+        self._finished: bool = False
+        self._known_dependency_types = (
+            "snapshot",
+            "mirror",
+            "repo",
+            "publish",
+            "virtual",
+        )
+        self.frozen = False
+
+    def get_provides(self):  # pragma: no cover
+        """Return all provides of this command.
+
+        :rtype: set()"""
+        return self._provides
+
+    def append(self, argument):
+        """Append additional arguments to the command.
+
+        :param argument: String argument to append
+        :type argument: str"""
+        assert str(argument) == argument
+        if self.frozen:  # pragma: no cover
+            raise RuntimeError("Do not modify frozen Command")
+        self.cmd.append(argument)
+
+    def require(self, type_, identifier):
+        """Require a dependency for this command.
+
+        :param type_: Type or category of the dependency, e.g. snapshot
+        :type type_: str
+        :param identifier: Identifier of the dependency, for example the name
+                           of a snapshot
+        :type identifier: usually str
+        """
+        if self.frozen:  # pragma: no cover
+            raise RuntimeError("Do not modify frozen Command")
+        assert type_ in (
+            self._known_dependency_types
+            + ("any",)
+            + state_reader.SystemStateReader.known_dependency_types
+        )
+        self._requires.add((type_, str(identifier)))
+
+    def provide(self, type_, identifier):
+        """Provide a dependency for this command.
+
+        :param type_: Type or category of the dependency, e.g. snapshot
+        :type type_: str
+        :param identifier: Identifier of the dependency, for example the name
+                           of a snapshot
+        :type identifier: usually str
+        """
+        if self.frozen:  # pragma: no cover
+            raise RuntimeError("Do not modify frozen Command")
+        assert type_ in self._known_dependency_types
+        self._provides.add((type_, str(identifier)))
+
+    def execute(self):
+        """Execute the command. Return the return value of the command.
+
+        :rtype: integer"""
+        if self._finished:  # pragma: no cover
+            return self._finished
+
+        if not Command.pretend_mode:
+            lg.debug("Running command: %s", " ".join(self.cmd))
+            self._finished = bool(subprocess.check_call(self.cmd))
+        else:
+            lg.info("Pretending to run command: %s", " ".join(self.cmd))
+
+        return self._finished
+
+    def repr_cmd(self):
+        """Return repr of the command.
+ + :rtype: str""" + return repr(self.cmd) + + def _freeze_common(self): + if not self.frozen: + self.frozen = True + # manually checking using self.frozen + self._requires = frozenset(self._requires) # type: ignore + self._provides = frozenset(self._provides) # type: ignore + + def freeze(self): + self._freeze_common() + # manually checking using self.frozen + self.cmd = tuple(self.cmd) # type: ignore + + def _hash_base(self): + self.freeze() + return hash((type(self), self._requires, self._provides)) + + def _eq_base(self, other): + self.freeze() + other.freeze() + return ( + type(self) == type(other) + and self._requires == other._requires + and self._provides == other._provides + ) + + def __hash__(self): + """Hash of the command. + + :rtype: integer""" + dependencies_hash = self._hash_base() + return hash((self.cmd, dependencies_hash)) + + def __eq__(self, other): + """Equalitity based on the hash, might collide... hmm""" + return self._eq_base(other) and self.cmd == other.cmd + + def __repr__(self): + return "Command<%s requires %s, provides %s>\n" % ( + self.repr_cmd(), + ", ".join([repr(x) for x in self._requires]), + ", ".join([repr(x) for x in self._provides]), + ) + + @staticmethod + def command_list_to_digraph(commands): # pragma: no cover + """Generate dot source for a digraph - suitable for generating + diagrams. + + The requires and provides from the commands build nodes, the commands + themselves act as connectors. + + :param commands: The commands to draw a diagram with + :type commands: list + """ + + nodes = set() + edges = set() + + def result_node(type_, name): + """Get the dot representation of a result node.""" + return ( + '"%s %s" [shape=ellipse]' % (type_, name), + '"%s %s"' % (type_, name), + ) + + def cmd_node(command): + """Get the dot representation of a command node.""" + return ( + '"%s" [shape=box]' % command.repr_cmd(), + '"%s"' % command.repr_cmd(), + ) + + for cmd in commands: + if cmd is None: + continue + + cmd_spec, cmd_identifier = cmd_node(cmd) + nodes.add(cmd_spec) + + for type_, name in cmd._requires: + spec, identifier = result_node(type_, name) + nodes.add(spec) + edges.add((identifier, cmd_identifier)) + + for type_, name in cmd._provides: + spec, identifier = result_node(type_, name) + nodes.add(spec) + edges.add((cmd_identifier, identifier)) + + template = """ + digraph { + %s; + %s; + } + """ + return template % ( + ";\n".join(nodes), + ";\n".join(["%s -> %s" % edge for edge in edges]), + ) + + @staticmethod + def order_commands(commands, has_dependency_cb=lambda x: False): + """Order the commands according to the dependencies they + provide/require. 
+
+        :param commands: The commands to order
+        :type commands: list
+        :param has_dependency_cb: Optional callback to resolve external
+                                  dependencies
+        :type has_dependency_cb: function"""
+
+        commands = set([c for c in commands if c is not None])
+
+        lg.debug("Ordering commands: %s", [str(cmd) for cmd in commands])
+
+        have_requirements: dict["Command", int] = collections.defaultdict(lambda: 0)
+        required_number: dict["Command", int] = collections.defaultdict(lambda: 0)
+        scheduled = []
+
+        for cmd in commands:
+            for provide in cmd._provides:
+                required_number[provide] += 1
+
+        something_changed = True
+        while something_changed:
+            something_changed = False
+
+            for cmd in commands:
+                if cmd in scheduled:
+                    continue
+
+                can_schedule = True
+                for req in cmd._requires:
+                    if have_requirements[req] < required_number[req]:
+                        lg.debug(
+                            "%s: dependency %s not fulfilled, "
+                            "checking aptly state" % (cmd, req)
+                        )
+                        # No command providing our dependency.. Let's see if
+                        # it's already otherwise fulfilled
+                        if not has_dependency_cb(req):
+                            lg.debug(
+                                "%s: dependency %s not "
+                                "in aptly state either" % (cmd, req)
+                            )
+                            can_schedule = False
+                            # Break out of the requirements loop, as the
+                            # command cannot be scheduled anyway.
+                            break
+
+                if can_schedule:
+                    lg.debug("%s: all dependencies fulfilled" % cmd)
+                    scheduled.append(cmd)
+                    for provide in cmd._provides:
+                        have_requirements[provide] += 1
+
+                    something_changed = True
+
+        unresolved = [cmd for cmd in commands if cmd not in scheduled]
+
+        if len(unresolved) > 0:  # pragma: no cover
+            raise ValueError(
+                "Commands with unresolved deps: %s" % [str(cmd) for cmd in unresolved]
+            )
+
+        # Just one last verification before we commence
+        scheduled_set = set([cmd for cmd in scheduled])
+        incoming_set = set([cmd for cmd in commands])
+        assert incoming_set == scheduled_set
+
+        lg.info("Reordered commands: %s", [str(cmd) for cmd in scheduled])
+
+        return scheduled
+
+
+class FunctionCommand(Command):
+    """Represents a function command and is used to resolve dependencies between
+    such commands. This command executes the given function. \*args and
+    \*\*kwargs are passed through.
+
+    :param func: The function to execute
+    :type func: callable
+    """
+
+    def __init__(self, func, *args, **kwargs):
+        super().__init__([])
+
+        assert hasattr(func, "__call__")
+        self.cmd = [str(id(func))]
+        self.func = func
+        self.args = args
+        self.kwargs = kwargs
+
+    def freeze(self):
+        self._freeze_common()
+        # manually checking using self.frozen
+        self.kwargs = frozendict(self.kwargs)  # type: ignore
+
+    def __hash__(self):
+        dependencies_hash = self._hash_base()
+        return hash((id(self.func), self.args, self.kwargs, dependencies_hash))
+
+    def __eq__(self, other):
+        return (
+            self._eq_base(other)
+            and id(self.func) == id(other.func)
+            and self.args == other.args
+            and self.kwargs == other.kwargs
+        )
+
+    def execute(self):
+        """Execute the command. (Call the function)."""
+        if self._finished:  # pragma: no cover
+            return self._finished
+        if not Command.pretend_mode:
+            lg.debug(
+                "Running code: %s(args=%s, kwargs=%s)",
+                self.func.__name__,
+                repr(self.args),
+                repr(self.kwargs),
+            )
+
+            self.func(*self.args, **self.kwargs)
+
+            self._finished = True
+        else:  # pragma: no cover
+            lg.info(
+                "Pretending to run code: %s(args=%s, kwargs=%s)",
+                self.repr_cmd(),
+                repr(self.args),
+                repr(self.kwargs),
+            )
+
+        return self._finished
+
+    def repr_cmd(self):
+        """Return repr of the command.
+ + :rtype: str""" + # We need to "id" ourselves here so that multiple commands that call a + # function with the same name won't be shown as being equal. + return "%s|%s" % (self.func.__name__, id(self)) + + def __repr__(self): + return "FunctionCommand<%s requires %s, provides %s>\n" % ( + self.repr_cmd(), + ", ".join([repr(x) for x in self._requires]), + ", ".join([repr(x) for x in self._provides]), + ) diff --git a/pyaptly/conftest.py b/pyaptly/conftest.py index c24c8a3..6d8157a 100644 --- a/pyaptly/conftest.py +++ b/pyaptly/conftest.py @@ -11,8 +11,7 @@ import tomli import yaml -import pyaptly -from pyaptly import util +from pyaptly import main, state_reader, util aptly_conf = Path.home().absolute() / ".aptly.conf" test_base = Path(__file__).absolute().parent / "tests" @@ -131,14 +130,14 @@ def test_mirror_create(environment, config, caplog): def mirror_update(environment, config): """Test if updating mirrors works.""" args = ["-c", config, "mirror", "create"] - state = pyaptly.SystemStateReader() + state = state_reader.SystemStateReader() state.read() assert "fakerepo01" not in state.mirrors - pyaptly.main(args) + main.main(args) state.read() assert "fakerepo01" in state.mirrors args[3] = "update" - pyaptly.main(args) + main.main(args) args = [ "aptly", "mirror", @@ -154,8 +153,8 @@ def mirror_update(environment, config): def snapshot_create(config, mirror_update, freeze): """Test if createing snapshots works.""" args = ["-c", config, "snapshot", "create"] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() assert set(["fakerepo01-20121010T0000Z", "fakerepo02-20121006T0000Z"]).issubset( state.snapshots @@ -172,8 +171,8 @@ def snapshot_update_rotating(config, mirror_update, freeze): "snapshot", "create", ] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() assert set( [ @@ -188,7 +187,7 @@ def snapshot_update_rotating(config, mirror_update, freeze): "snapshot", "update", ] - pyaptly.main(args) + main.main(args) state.read() assert set( [ @@ -217,8 +216,8 @@ def snapshot_update_rotating(config, mirror_update, freeze): def repo_create(environment, config, test_key_03): """Test if creating repositories works.""" args = ["-c", config, "repo", "create"] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() util.run_command( [ @@ -236,8 +235,8 @@ def repo_create(environment, config, test_key_03): def publish_create(config, snapshot_create, test_key_03): """Test if creating publishes works.""" args = ["-c", config, "publish", "create"] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() assert set(["fakerepo02 main", "fakerepo01 main"]) == state.publishes expect = { @@ -251,8 +250,8 @@ def publish_create(config, snapshot_create, test_key_03): def publish_create_rotating(config, snapshot_update_rotating, test_key_03): """Test if creating publishes works.""" args = ["-c", config, "publish", "create"] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() assert ( set( @@ -287,7 +286,7 @@ def publish_create_republish(config, publish_create, caplog): "publish", "create", ] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() 
state.read() assert "fakerepo01-stable main" in state.publishes diff --git a/pyaptly/date_tools.py b/pyaptly/date_tools.py new file mode 100644 index 0000000..49fe745 --- /dev/null +++ b/pyaptly/date_tools.py @@ -0,0 +1,231 @@ +import datetime + + +def iso_first_week_start(iso_year, tzinfo=None): + """The gregorian calendar date of the first day of the given ISO year + + :param iso_year: Year to find the date of the first week. + :type iso_year: int""" + fourth_jan = datetime.datetime(iso_year, 1, 4, tzinfo=tzinfo) + delta = datetime.timedelta(fourth_jan.isoweekday() - 1) + return fourth_jan - delta + + +def iso_to_gregorian(iso_year, iso_week, iso_day, tzinfo=None): + """Gregorian calendar date for the given ISO year, week and day + + :param iso_year: ISO year + :type iso_year: int + :param iso_week: ISO week + :type iso_week: int + :param iso_day: ISO day + :type iso_day: int""" + year_start = iso_first_week_start(iso_year, tzinfo) + return year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1) + + +def time_remove_tz(time): + """Convert a :py:class`datetime.time` to :py:class`datetime.time` to + without tzinfo. + + :param time: Time to convert + :type time: :py:class:`datetime.time` + :rtype: :py:class:`datetime.time` + """ + return datetime.time( + hour=time.hour, + minute=time.minute, + second=time.second, + microsecond=time.microsecond, + ) + + +def time_delta_helper(time): # pragma: no cover + """Convert a :py:class`datetime.time` to :py:class`datetime.datetime` to + calculate deltas + + :param time: Time to convert + :type time: :py:class:`datetime.time` + :rtype: :py:class:`datetime.datetime` + """ + return datetime.datetime( + year=2000, + month=1, + day=1, + hour=time.hour, + minute=time.minute, + second=time.second, + microsecond=time.microsecond, + tzinfo=time.tzinfo, + ) + + +def date_round_weekly(date, day_of_week=1, time=None): + """Round datetime back (floor) to a given the of the week. + + THIS FUNCTION IGNORES THE TZINFO OF TIME and assumes it is the same tz as + the date. + + :param date: Datetime object to round + :type date: :py:class:`datetime.datetime` + :param day_of_week: ISO day of week: monday is 1 and sunday is 7 + :type day_of_week: int + :param time: Roundpoint in the day (tzinfo ignored) + :type time: :py:class:`datetime.time` + :rtype: :py:class:`datetime.datetime`""" + if time: + time = time_remove_tz(time) + else: # pragma: no cover + time = datetime.time(hour=0, minute=0) + + delta = datetime.timedelta( + days=day_of_week - 1, + hours=time.hour, + minutes=time.minute, + seconds=time.second, + microseconds=time.microsecond, + ) + raster_date = date - delta + iso = raster_date.isocalendar() + rounded_date = iso_to_gregorian(iso[0], iso[1], 1, date.tzinfo) + return rounded_date + delta + + +def date_round_daily(date, time=None): + """Round datetime to day back (floor) to the roundpoint (time) in the day + + THIS FUNCTION IGNORES THE TZINFO OF TIME and assumes it is the same tz as + the date. 
+
+    :param date: Datetime object to round
+    :type date: :py:class:`datetime.datetime`
+    :param time: Roundpoint in the day (tzinfo ignored)
+    :type time: :py:class:`datetime.time`
+    :rtype: :py:class:`datetime.datetime`"""
+    if time:
+        time = time_remove_tz(time)
+    else:  # pragma: no cover
+        time = datetime.time(hour=0, minute=0)
+    delta = datetime.timedelta(
+        hours=time.hour,
+        minutes=time.minute,
+        seconds=time.second,
+        microseconds=time.microsecond,
+    )
+    raster_date = date - delta
+    rounded_date = datetime.datetime(
+        year=raster_date.year,
+        month=raster_date.month,
+        day=raster_date.day,
+        tzinfo=raster_date.tzinfo,
+    )
+    return rounded_date + delta
+
+
+def expand_timestamped_name(name, timestamp_config, date=None):
+    """Expand a timestamped name using round_timestamp.
+
+    :param timestamp_config: Contains the recurrence specification for the
+                             timestamp. See :func:`round_timestamp`
+    :type timestamp_config: dict
+    :param date: The date to expand the timestamp with.
+    :type date: :py:class:`datetime.datetime`"""
+    if "%T" not in name:
+        return name
+    timestamp = round_timestamp(timestamp_config, date)
+    return name.replace("%T", timestamp.strftime("%Y%m%dT%H%MZ"))
+
+
+def round_timestamp(timestamp_config, date=None):
+    """Round the given name by adding a timestamp.
+
+    The contents of the timestamp is configured by the given timestamp_config
+    dict, which MUST contain a "time" key, and MAY contain a "repeat-weekly"
+    key.
+
+    If the key "repeat-weekly" is given, it is expected to contain a
+    three-letter weekday name (mon, tue, thu, ...). The "time" key is expected
+    to be a 24 hour HH:MM time specification.
+
+    Timestamps are rounded down to the nearest time as specified (which may be
+    on the previous day). If repeat-weekly is specified, it is rounded down
+    (back in time) to the given weekday.
+
+    The name parameter may be a simple string. If it contains the marker "%T",
+    then this placeholder will be replaced by the timestamp. If it does NOT
+    contain that marker, then nothing happens (and the timestamp_config is not
+    evaluated at all).
+
+    If a datetime object is given as third parameter, then it is used to
+    generate the timestamp. If it is omitted, the current date/time is used.
+
+    Example:
+    >>> expand_timestamped_name(
+    ...     'foo-%T',
+    ...     {'timestamp': {'time': '00:00'}},
+    ...     datetime.datetime(2015,10,7, 15,30)  # A Wednesday
+    ... )
+    'foo-20151007T0000Z'
+
+    >>> expand_timestamped_name(
+    ...     'foo-%T',
+    ...     {'timestamp': {'time': '00:00', 'repeat-weekly': 'mon'}},
+    ...     datetime.datetime(2015,10,8, 15,30)  # A Thursday
+    ... )
+    'foo-20151005T0000Z'
+
+    >>> expand_timestamped_name(
+    ...     'foo',  # No %T placeholder, timestamp info is ignored
+    ...     {'timestamp': {'time': '00:00', 'repeat-weekly': 'mon'}},
+    ...     datetime.datetime(2015,10,8, 15,30)
+    ... )
+    'foo'
+
+    :param timestamp_config: Contains the recurrence specification for the
+                             timestamp.
+    :type timestamp_config: dict
+    :param date: The date to expand the timestamp with.
+ :type date: :py:class:`datetime.datetime` + """ + timestamp_info = timestamp_config.get("timestamp", timestamp_config) + config_time = timestamp_info.get("time", "FAIL") + if config_time == "FAIL": # pragma: no cover + raise ValueError( + "Timestamp config has no valid time entry: %s" % str(timestamp_config) + ) + + config_repeat_weekly = timestamp_info.get("repeat-weekly", None) + + hour, minute = [int(x) for x in config_time.split(":")][:2] + + if date is None: + date = datetime.datetime.now() + + if config_repeat_weekly is not None: + day_of_week = day_of_week_map.get(config_repeat_weekly.lower()) + + timestamp = date_round_weekly( + date, day_of_week, datetime.time(hour=hour, minute=minute) + ) + else: + timestamp = date_round_daily(date, datetime.time(hour=hour, minute=minute)) + return timestamp + + +def format_timestamp(timestamp): + """Wrapper for strftime, to ensure we're all using the same format. + + :param timestamp: The timestamp to format + :type timestamp: :py:class:`datetime.datetime`""" + return timestamp.strftime("%Y%m%dT%H%MZ") + + +day_of_week_map = { + "mon": 1, + "tue": 2, + "wed": 3, + "thu": 4, + "fri": 5, + "sat": 6, + "sun": 7, +} diff --git a/pyaptly/legacy.py b/pyaptly/legacy.py deleted file mode 100755 index ae58487..0000000 --- a/pyaptly/legacy.py +++ /dev/null @@ -1,1678 +0,0 @@ -# type: ignore # TODO -# flake8: noqa # TODO - -#!/usr/bin/env python2 -"""Aptly mirror/snapshot managment automation.""" -import argparse -import codecs -import collections -import datetime -import logging -import os -import re -import subprocess -import sys - -import freeze -import six -import yaml - -from .util import get_default_keyserver - -_logging_setup = False - -if six.PY2: - environb = os.environ # pragma: no cover -else: - environb = os.environb # pragma: no cover - - -def init_hypothesis(): - """Initialize hypothesis profile if hypothesis is available""" - try: # pragma: no cover:w - if b"HYPOTHESIS_PROFILE" in environb: - from hypothesis import Settings - - Settings.register_profile("ci", Settings(max_examples=10000)) - Settings.load_profile(os.getenv("HYPOTHESIS_PROFILE", "default")) - except (ImportError, AttributeError): # pragma: no cover - pass - - -def get_logger(): - """Get the logger. - - :rtype: logging.Logger""" - return logging.getLogger("pyaptly") - - -lg = get_logger() -init_hypothesis() - - -def iso_first_week_start(iso_year, tzinfo=None): - """The gregorian calendar date of the first day of the given ISO year - - :param iso_year: Year to find the date of the first week. - :type iso_year: int""" - fourth_jan = datetime.datetime(iso_year, 1, 4, tzinfo=tzinfo) - delta = datetime.timedelta(fourth_jan.isoweekday() - 1) - return fourth_jan - delta - - -def iso_to_gregorian(iso_year, iso_week, iso_day, tzinfo=None): - """Gregorian calendar date for the given ISO year, week and day - - :param iso_year: ISO year - :type iso_year: int - :param iso_week: ISO week - :type iso_week: int - :param iso_day: ISO day - :type iso_day: int""" - year_start = iso_first_week_start(iso_year, tzinfo) - return year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1) - - -def time_remove_tz(time): - """Convert a :py:class`datetime.time` to :py:class`datetime.time` to - without tzinfo. 
- - :param time: Time to convert - :type time: :py:class:`datetime.time` - :rtype: :py:class:`datetime.time` - """ - return datetime.time( - hour=time.hour, - minute=time.minute, - second=time.second, - microsecond=time.microsecond, - ) - - -def time_delta_helper(time): # pragma: no cover - """Convert a :py:class`datetime.time` to :py:class`datetime.datetime` to - calculate deltas - - :param time: Time to convert - :type time: :py:class:`datetime.time` - :rtype: :py:class:`datetime.datetime` - """ - return datetime.datetime( - year=2000, - month=1, - day=1, - hour=time.hour, - minute=time.minute, - second=time.second, - microsecond=time.microsecond, - tzinfo=time.tzinfo, - ) - - -def date_round_weekly(date, day_of_week=1, time=None): - """Round datetime back (floor) to a given the of the week. - - THIS FUNCTION IGNORES THE TZINFO OF TIME and assumes it is the same tz as - the date. - - :param date: Datetime object to round - :type date: :py:class:`datetime.datetime` - :param day_of_week: ISO day of week: monday is 1 and sunday is 7 - :type day_of_week: int - :param time: Roundpoint in the day (tzinfo ignored) - :type time: :py:class:`datetime.time` - :rtype: :py:class:`datetime.datetime`""" - if time: - time = time_remove_tz(time) - else: # pragma: no cover - time = datetime.time(hour=0, minute=0) - - delta = datetime.timedelta( - days=day_of_week - 1, - hours=time.hour, - minutes=time.minute, - seconds=time.second, - microseconds=time.microsecond, - ) - raster_date = date - delta - iso = raster_date.isocalendar() - rounded_date = iso_to_gregorian(iso[0], iso[1], 1, date.tzinfo) - return rounded_date + delta - - -def date_round_daily(date, time=None): - """Round datetime to day back (floor) to the roundpoint (time) in the day - - THIS FUNCTION IGNORES THE TZINFO OF TIME and assumes it is the same tz as - the date. - - :param date: Datetime object to round - :type date: :py:class:`datetime.datetime` - :param time: Roundpoint in the day (tzinfo ignored) - :type time: :py:class:`datetime.time` - :rtype: :py:class:`datetime.datetime`""" - if time: - time = time_remove_tz(time) - else: # pragma: no cover - time = datetime.time(hour=0, minute=0) - delta = datetime.timedelta( - hours=time.hour, - minutes=time.minute, - seconds=time.second, - microseconds=time.microsecond, - ) - raster_date = date - delta - rounded_date = datetime.datetime( - year=raster_date.year, - month=raster_date.month, - day=raster_date.day, - tzinfo=raster_date.tzinfo, - ) - return rounded_date + delta - - -def call_output(args, input_=None): - """Call command and return output. - - :param args: Command to execute - :type args: list - :param input_: Input to command - :type input_: bytes - """ - p = subprocess.Popen( - args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE - ) - output, err = p.communicate(input_) - if p.returncode != 0: - raise subprocess.CalledProcessError( - p.returncode, - args, - output, - err, - ) - return (output.decode("UTF-8"), err.decode("UTF-8")) - - -class Command(object): - """Repesents a system command and is used to resolve dependencies between - such commands. - - :param cmd: The command as list, one item per argument - :type cmd: list - """ - - pretend_mode = False - - def __init__(self, cmd): - self.cmd = cmd - self._requires = set() - self._provides = set() - self._finished = None - self._known_dependency_types = ( - "snapshot", - "mirror", - "repo", - "publish", - "virtual", - ) - - def get_provides(self): # pragma: no cover - """Return all provides of this command. 
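date_round_weekly floors by a detour: subtract the round-point offset, snap the result to its ISO week's Monday, then add the offset back. A small illustration of the same helper as shipped in the new date_tools module:

    import datetime

    # Weekly round-point: Wednesday (day_of_week=3) at 10:00.
    rounded = date_round_weekly(
        datetime.datetime(2015, 10, 9, 9, 0),  # a Friday morning
        day_of_week=3,
        time=datetime.time(10, 0),
    )
    # Floored back to the preceding Wednesday 10:00:
    assert rounded == datetime.datetime(2015, 10, 7, 10, 0)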
- - :rtype: set()""" - return self._provides - - def append(self, argument): - """Append additional arguments to the command. - - :param argument: String argument to append - :type argument: str""" - assert str(argument) == argument - self.cmd.append(argument) - - def require(self, type_, identifier): - """Require a dependency for this command. - - :param type_: Type or category of the dependency ie. snapshot - :type type_: str - :param identifier: Identifier of the dependency for example name of a - snapshot - :type identifier: usually str - """ - assert type_ in ( - self._known_dependency_types - + ("any",) - + SystemStateReader.known_dependency_types - ) - self._requires.add((type_, str(identifier))) - - def provide(self, type_, identifier): - """Provide a dependency for this command. - - :param type_: Type or category of the dependency ie. snapshot - :type type_: str - :param identifier: Identifier of the dependency for example name of a - snapshot - :type identifier: usually str - """ - assert type_ in self._known_dependency_types - self._provides.add((type_, str(identifier))) - - def execute(self): - """Execute the command. Return the return value of the command. - - :rtype: integer""" - if self._finished is not None: # pragma: no cover - return self._finished - - if not Command.pretend_mode: - lg.debug("Running command: %s", " ".join(self.cmd)) - self._finished = subprocess.check_call(self.cmd) - else: - lg.info("Pretending to run command: %s", " ".join(self.cmd)) - - return self._finished - - def repr_cmd(self): - """Return repr of the command. - - :rtype: str""" - return repr(self.cmd) - - def __hash__(self): - """Hash of the command. - - :rtype: integer""" - return freeze.recursive_hash((self.cmd, self._requires, self._provides)) - - def __eq__(self, other): - """Equalitity based on the hash, might collide... hmm""" - return self.__hash__() == other.__hash__() - - def __repr__(self): - return "Command<%s requires %s, provides %s>\n" % ( - self.repr_cmd(), - ", ".join([repr(x) for x in self._requires]), - ", ".join([repr(x) for x in self._provides]), - ) - - @staticmethod - def command_list_to_digraph(commands): # pragma: no cover - """Generate dot source for a digraph - suitable for generating - diagrams. - - The requires and provides from the commands build nodes, the commands - themselves act as connectors. - - :param commands: The commands to draw a diagram with - :type commands: list - """ - - nodes = set() - edges = set() - - def result_node(type_, name): - """Get the dot representation of a result node.""" - return ( - '"%s %s" [shape=ellipse]' % (type_, name), - '"%s %s"' % (type_, name), - ) - - def cmd_node(command): - """Get the dot representation of a command node.""" - return ( - '"%s" [shape=box]' % command.repr_cmd(), - '"%s"' % command.repr_cmd(), - ) - - for cmd in commands: - if cmd is None: - continue - - cmd_spec, cmd_identifier = cmd_node(cmd) - nodes.add(cmd_spec) - - for type_, name in cmd._requires: - spec, identifier = result_node(type_, name) - nodes.add(spec) - edges.add((identifier, cmd_identifier)) - - for type_, name in cmd._provides: - spec, identifier = result_node(type_, name) - nodes.add(spec) - edges.add((cmd_identifier, identifier)) - - template = """ - digraph { - %s; - %s; - } - """ - return template % ( - ";\n".join(nodes), - ";\n".join(["%s -> %s" % edge for edge in edges]), - ) - - @staticmethod - def order_commands(commands, has_dependency_cb=lambda x: False): - """Order the commands according to the dependencies they - provide/require. 
- - :param commands: The commands to order - :type commands: list - :param has_dependency_cb: Optional callback the resolve external - dependencies - :type has_dependency_cb: function""" - - commands = set([c for c in commands if c is not None]) - - lg.debug("Ordering commands: %s", [str(cmd) for cmd in commands]) - - have_requirements = collections.defaultdict(lambda: 0) - required_number = collections.defaultdict(lambda: 0) - scheduled = [] - - for cmd in commands: - for provide in cmd._provides: - required_number[provide] += 1 - - something_changed = True - while something_changed: - something_changed = False - - for cmd in commands: - if cmd in scheduled: - continue - - can_schedule = True - for req in cmd._requires: - if have_requirements[req] < required_number[req]: - lg.debug( - "%s: dependency %s not fulfilled, " - "checking aptly state" % (cmd, req) - ) - # No command providing our dependency.. Let's see if - # it's already otherwise fulfilled - if not has_dependency_cb(req): - lg.debug( - "%s: dependency %s not " - "in aptly state either" % (cmd, req) - ) - can_schedule = False - # Break out of the requirements loop, as the - # command cannot be scheduled anyway. - break - - if can_schedule: - lg.debug("%s: all dependencies fulfilled" % cmd) - scheduled.append(cmd) - for provide in cmd._provides: - have_requirements[provide] += 1 - - something_changed = True - - unresolved = [cmd for cmd in commands if cmd not in scheduled] - - if len(unresolved) > 0: # pragma: no cover - raise ValueError( - "Commands with unresolved deps: %s" % [str(cmd) for cmd in unresolved] - ) - - # Just one last verification before we commence - scheduled_set = set([cmd for cmd in scheduled]) - incoming_set = set([cmd for cmd in commands]) - assert incoming_set == scheduled_set - - lg.info("Reordered commands: %s", [str(cmd) for cmd in scheduled]) - - return scheduled - - -class FunctionCommand(Command): - """Repesents a function command and is used to resolve dependencies between - such commands. This command executes the given function. \*args and - \*\*kwargs are passed through. - - :param func: The function to execute - :type func: callable - """ - - def __init__(self, func, *args, **kwargs): - super(FunctionCommand, self).__init__(None) - - assert hasattr(func, "__call__") - self.cmd = func - self.args = args - self.kwargs = kwargs - - def __hash__(self): - return freeze.recursive_hash( - (id(self.cmd), self.args, self.kwargs, self._requires, self._provides) - ) - - def execute(self): - """Execute the command. (Call the function).""" - if self._finished is not None: # pragma: no cover - return self._finished - - if not Command.pretend_mode: - lg.debug( - "Running code: %s(args=%s, kwargs=%s)", - self.cmd.__name__, - repr(self.args), - repr(self.kwargs), - ) - - self.cmd(*self.args, **self.kwargs) - - self._finished = True - else: # pragma: no cover - lg.info( - "Pretending to run code: %s(args=%s, kwargs=%s)", - self.repr_cmd(), - repr(self.args), - repr(self.kwargs), - ) - - return self._finished - - def repr_cmd(self): - """Return repr of the command. - - :rtype: str""" - # We need to "id" ourselves here so that multiple commands that call a - # function with the same name won't be shown as being equal. 
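order_commands is a fixed-point scheduler: it sweeps the command set repeatedly, scheduling any command whose requirements are provided by an already-scheduled command or satisfied externally via has_dependency_cb, until nothing changes. A toy sketch of the provide/require contract (assuming the class ends up in pyaptly/command.py, as the new imports in this diff suggest):

    from pyaptly import command

    snap = command.Command(
        ["aptly", "snapshot", "create", "s1", "from", "mirror", "m1"]
    )
    snap.provide("snapshot", "s1")

    pub = command.Command(["aptly", "publish", "snapshot", "s1"])
    pub.require("snapshot", "s1")

    # The callback consults no external state here, so ordering follows
    # provide/require alone: snap must come before pub.
    ordered = command.Command.order_commands([pub, snap], lambda dep: False)
    assert ordered == [snap, pub]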
- return "%s|%s" % (self.cmd.__name__, id(self)) - - def __repr__(self): - return "FunctionCommand<%s requires %s, provides %s>\n" % ( - self.repr_cmd(), - ", ".join([repr(x) for x in self._requires]), - ", ".join([repr(x) for x in self._provides]), - ) - - -class SystemStateReader(object): - """Reads the state from aptly and gpg to find out what operations have to - be performed to reach the state defined in the yml config-file. - """ - - known_dependency_types = ("repo", "snapshot", "mirror", "gpg_key") - - def __init__(self): - self.gpg_keys = set() - self.mirrors = set() - self.repos = set() - self.snapshots = set() - self.snapshot_map = {} - self.publishes = set() - self.publish_map = {} - - def _extract_sources(self, data): - """ - Extract sources from data. - - Data needs to be in following format: - Name: test-snap - Description: some description - Sources: - test-snap-base [snapshot] - """ - entered_sources = False - sources = [] - for line in data.split("\n"): - # source line need to start with two spaces - if entered_sources and line[0:2] != " ": - break - - if entered_sources: - sources.append(line) - - if line == "Sources:": - entered_sources = True - - return sources - - def read(self): - """Reads all available system states.""" - self.read_gpg() - self.read_repos() - self.read_mirror() - self.read_snapshot() - self.read_snapshot_map() - self.read_publishes() - self.read_publish_map() - - def read_gpg(self): - """Read all trusted keys in gpg.""" - self.gpg_keys = set() - cmd = [ - "gpg", - "--no-default-keyring", - "--keyring", - "trustedkeys.gpg", - "--list-keys", - "--with-colons", - ] - data, _ = call_output(cmd) - lg.debug("GPG returned: %s", data) - for line in data.split("\n"): - field = line.split(":") - if field[0] in ("pub", "sub"): - key = field[4] - key_short = key[8:] - self.gpg_keys.add(key) - self.gpg_keys.add(key_short) - - def read_publish_map(self): - """Create a publish map. publish -> snapshots""" - self.publish_map = {} - # match example: main: test-snapshot [snapshot] - re_snap = re.compile(r"\s+[\w\d-]+\:\s([\w\d-]+)\s\[snapshot\]") - for publish in self.publishes: - prefix, dist = publish.split(" ") - data, _ = call_output(["aptly", "publish", "show", dist, prefix]) - - sources = self._extract_sources(data) - matches = [re_snap.match(source) for source in sources] - snapshots = [match.group(1) for match in matches if match] - self.publish_map[publish] = set(snapshots) - - lg.debug("Joined snapshots and publishes: %s", self.publish_map) - - def read_snapshot_map(self): - """Create a snapshot map. snapshot -> snapshots. 
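_extract_sources relies purely on indentation: it starts collecting after the "Sources:" line and stops at the first line that does not begin with two spaces. Against hypothetical `aptly snapshot show` output (note the two-space indent is kept in the result):

    data = (
        "Name: test-snap\n"
        "Sources:\n"
        "  test-snap-base [snapshot]\n"
        "Packages: 42\n"
    )
    assert SystemStateReader()._extract_sources(data) == [
        "  test-snap-base [snapshot]"
    ]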
This is also called - merge-tree.""" - self.snapshot_map = {} - # match example: test-snapshot [snapshot] - re_snap = re.compile(r"\s+([\w\d-]+)\s\[snapshot\]") - for snapshot_outer in self.snapshots: - data, _ = call_output(["aptly", "snapshot", "show", snapshot_outer]) - sources = self._extract_sources(data) - matches = [re_snap.match(source) for source in sources] - snapshots = [match.group(1) for match in matches if match] - self.snapshot_map[snapshot_outer] = set(snapshots) - - lg.debug("Joined snapshots with self(snapshots): %s", self.snapshot_map) - - def read_publishes(self): - """Read all available publishes.""" - self.publishes = set() - self.read_aptly_list("publish", self.publishes) - - def read_repos(self): - """Read all available repos.""" - self.repos = set() - self.read_aptly_list("repo", self.repos) - - def read_mirror(self): - """Read all available mirrors.""" - self.mirrors = set() - self.read_aptly_list("mirror", self.mirrors) - - def read_snapshot(self): - """Read all available snapshots.""" - self.snapshots = set() - self.read_aptly_list("snapshot", self.snapshots) - - def read_aptly_list(self, type_, list_): - """Generic method to read lists from aptly. - - :param type_: The type of list to read ie. snapshot - :type type_: str - :param list_: Read into this list - :param list_: list""" - data, _ = call_output(["aptly", type_, "list", "-raw"]) - lg.debug("Aptly returned %s: %s", type_, data) - for line in data.split("\n"): - clean_line = line.strip() - if clean_line: - list_.add(clean_line) - - def has_dependency(self, dependency): - """Check system state dependencies. - - :param dependency: The dependency to check - :type dependency: list""" - type_, name = dependency - - if type_ == "repo": # pragma: no cover - return name in self.repos - if type_ == "mirror": # pragma: no cover - return name in self.mirrors - elif type_ == "snapshot": - return name in self.snapshots # pragma: no cover - elif type_ == "gpg_key": # pragma: no cover - return name in self.gpg_keys # Not needed ATM - elif type_ == "virtual": - # virtual dependencies can never be resolved by the - # system state reader - they are used for internal - # ordering only - return False - else: - raise ValueError("Unknown dependency to resolve: %s" % str(dependency)) - - -state = SystemStateReader() - - -def main(argv=None): - """Called by command-line, defines parsers and executes commands. 
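The module-level `state` instance defined above is what the rest of the code consults; a dependency check is a plain tuple lookup against the sets it read. A sketch (it shells out to aptly and gpg, so it is only meaningful on a host with both installed; the snapshot name is hypothetical):

    state = SystemStateReader()
    state.read()  # populates mirrors, repos, snapshots, publishes, gpg_keys
    state.has_dependency(("snapshot", "base-snap"))  # True if aptly lists it
    state.has_dependency(("virtual", "anything"))    # always False by design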
- - :param argv: Arguments usually taken from sys.argv - :type argv: list""" - global _logging_setup - if not argv: # pragma: no cover - argv = sys.argv[1:] - parser = argparse.ArgumentParser(description="Manage aptly") - parser.add_argument( - "--config", - "-c", - help="Yaml config file defining mirrors and snapshots", - type=str, - required=True, - ) - parser.add_argument( - "--debug", - "-d", - help="Enable debug output", - action="store_true", - ) - parser.add_argument( - "--pretend", - "-p", - help="Do not do anything, just print out what WOULD be done", - action="store_true", - ) - subparsers = parser.add_subparsers() - mirror_parser = subparsers.add_parser("mirror", help="manage aptly mirrors") - mirror_parser.set_defaults(func=mirror) - mirror_parser.add_argument("task", type=str, choices=["create", "update"]) - mirror_parser.add_argument("mirror_name", type=str, nargs="?", default="all") - snap_parser = subparsers.add_parser("snapshot", help="manage aptly snapshots") - snap_parser.set_defaults(func=snapshot) - snap_parser.add_argument("task", type=str, choices=["create", "update"]) - snap_parser.add_argument("snapshot_name", type=str, nargs="?", default="all") - publish_parser = subparsers.add_parser( - "publish", help="manage aptly publish endpoints" - ) - publish_parser.set_defaults(func=publish) - publish_parser.add_argument("task", type=str, choices=["create", "update"]) - publish_parser.add_argument("publish_name", type=str, nargs="?", default="all") - repo_parser = subparsers.add_parser("repo", help="manage aptly repositories") - repo_parser.set_defaults(func=repo) - repo_parser.add_argument("task", type=str, choices=["create"]) - repo_parser.add_argument("repo_name", type=str, nargs="?", default="all") - - args = parser.parse_args(argv) - root = logging.getLogger() - formatter = logging.Formatter( - "%(asctime)s - %(name)s - %(levelname)s - %(message)s" - ) - if not _logging_setup: # noqa - handler = logging.StreamHandler(sys.stderr) - handler.setFormatter(formatter) - root.addHandler(handler) - handler.setLevel(logging.CRITICAL) - if args.debug: - root.setLevel(logging.DEBUG) - handler.setLevel(logging.DEBUG) - if args.pretend: - Command.pretend_mode = True - else: - Command.pretend_mode = False - - _logging_setup = True # noqa - lg.debug("Args: %s", vars(args)) - - with codecs.open(args.config, "r", encoding="UTF-8") as cfgfile: - cfg = yaml.load(cfgfile, Loader=yaml.FullLoader) - state.read() - - # run function for selected subparser - args.func(cfg, args) - - -day_of_week_map = { - "mon": 1, - "tue": 2, - "wed": 3, - "thu": 4, - "fri": 5, - "sat": 6, - "sun": 7, -} - - -def expand_timestamped_name(name, timestamp_config, date=None): - """Expand a timestamped name using round_timestamp. - - :param timestamp_config: Contains the recurrence specification for the - timestamp. See :func:`round_timestamp` - :type timestamp_config: dict - :param date: The date to expand the timestamp with. - :type date: :py:class:`datetime.datetime`""" - if "%T" not in name: - return name - timestamp = round_timestamp(timestamp_config, date) - return name.replace("%T", timestamp.strftime("%Y%m%dT%H%MZ")) - - -def round_timestamp(timestamp_config, date=None): - """Round the given name by adding a timestamp. - - The contents of the timestamp is configured by the given timestamp_config - dict, which MUST contain a "time" key, and MAY contain a "repeat-weekly" - key. - - If the key "repeat-weekly" is given, it is expected to contain a - three-letter weekday name (mon, tue, thu, ...). 
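Because main() takes an argv list, the CLI can also be driven programmatically, e.g. from tests. A hypothetical invocation (config filename assumed), using the pyaptly/main.py module added further down in this diff; note that top-level flags like -d must precede the subcommand:

    from pyaptly.main import main

    main(["-c", "mirrors.yml", "mirror", "create"])
    main(["-d", "-c", "mirrors.yml", "snapshot", "update"])  # debug logging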
The "time" key is expected - to be a 24 hour HH:MM time specification. - - Timestamps are rounded down to the nearest time as specified (which may be - on the previous day. If repeat-weekly is specified, it is rounded down - (back in time) to the given weekday.) - - The name parameter may be a simple string. If it contains the marker "%T", - then this placeholder will be replaced by the timestamp. If it does NOT - contain that marker, then nothing happens (and the timestamp_config is not - evaluated at all) - - If a datetime object is given as third parameter, then it is used to - generate the timestamp. If it is omitted, the current date/time is used. - - Example: - >>> expand_timestamped_name( - ... 'foo-%T', - ... {'timestamp': {'time': '00:00'}}, - ... datetime.datetime(2015,10,7, 15,30) # A Wednesday - ... ) - 'foo-20151007T0000Z' - - >>> expand_timestamped_name( - ... 'foo-%T', - ... {'timestamp': {'time': '00:00', 'repeat-weekly': 'mon'}}, - ... datetime.datetime(2015,10,8, 15,30) # A Thursday - ... ) - 'foo-20151005T0000Z' - - >>> expand_timestamped_name( - ... 'foo', # No %T placeholder, timestamp info is ignored - ... {'timestamp': {'time': '00:00', 'repeat-weekly': 'mon'}}, - ... datetime.datetime(2015,10,8, 15,30) - ... ) - 'foo' - - :param timestamp_config: Contains the recurrence specification for the - timestamp. - :type timestamp_config: dict - :param date: The date to expand the timestamp with. - :type date: :py:class:`datetime.datetime` - """ - timestamp_info = timestamp_config.get("timestamp", timestamp_config) - config_time = timestamp_info.get("time", "FAIL") - if config_time == "FAIL": # pragma: no cover - raise ValueError( - "Timestamp config has no valid time entry: %s" % str(timestamp_config) - ) - - config_repeat_weekly = timestamp_info.get("repeat-weekly", None) - - hour, minute = [int(x) for x in config_time.split(":")][:2] - - if date is None: - date = datetime.datetime.now() - - if config_repeat_weekly is not None: - day_of_week = day_of_week_map.get(config_repeat_weekly.lower()) - - timestamp = date_round_weekly( - date, day_of_week, datetime.time(hour=hour, minute=minute) - ) - else: - timestamp = date_round_daily(date, datetime.time(hour=hour, minute=minute)) - return timestamp - - -def unit_or_list_to_list(thingy): - """Ensures that a yml entry is always a list. Used to allow lists and - single units in the yml file. - - :param thingy: The data to ensure it is a list - :type thingy: list, tuple or other""" - if isinstance(thingy, list) or isinstance(thingy, tuple): - return list(thingy) - else: - return [thingy] - - -def publish_cmd_create(cfg, publish_name, publish_config, ignore_existing=False): - """Creates a publish command with its dependencies to be ordered and - executed later. - - :param cfg: pyaptly config - :type cfg: dict - :param publish_name: Name of the publish to create - :type publish_name: str - :param publish_config: Configuration of the publish from the yml file. 
- :type publish_config: dict""" - publish_fullname = "%s %s" % (publish_name, publish_config["distribution"]) - if publish_fullname in state.publishes and not ignore_existing: - # Nothing to do, publish already created - return - - publish_cmd = ["aptly", "publish"] - options = [] - source_args = [] - endpoint_args = [publish_name] - - has_source = False - num_sources = 0 - - for conf, conf_value in publish_config.items(): - if conf == "skip-contents": - if conf_value: - options.append("-skip-contents=true") - elif conf == "architectures": # pragma: no cover - options.append( - "-architectures=%s" % ",".join(unit_or_list_to_list(conf_value)) - ) - elif conf == "components": - components = unit_or_list_to_list(conf_value) - options.append("-component=%s" % ",".join(components)) - elif conf == "label": # pragma: no cover - options.append("-label=%s" % conf_value) - elif conf == "origin": # pragma: no cover - options.append("-origin=%s" % conf_value) - - elif conf == "distribution": - options.append("-distribution=%s" % conf_value) - - elif conf == "gpg-key": - options.append("-gpg-key=%s" % conf_value) - elif conf == "automatic-update": - # Ignored here - pass - elif conf == "snapshots": - if has_source: # pragma: no cover - raise ValueError( - "Multiple sources for publish %s %s" - % (publish_name, publish_config) - ) - has_source = True - snapshots = unit_or_list_to_list(conf_value) - source_args.append("snapshot") - source_args.extend( - [snapshot_spec_to_name(cfg, conf_value) for conf_value in snapshots] - ) - - num_sources = len(snapshots) - - elif conf == "repo": - if has_source: # pragma: no cover - raise ValueError( - "Multiple sources for publish %s %s" - % (publish_name, publish_config) - ) - has_source = True - source_args = ["repo", conf_value] - num_sources = 1 - elif conf == "publish": - if has_source: # pragma: no cover - raise ValueError( - "Multiple sources for publish %s %s" - % (publish_name, publish_config) - ) - has_source = True - conf_value = " ".join(conf_value.split("/")) - source_args.append("snapshot") - try: - sources = state.publish_map[conf_value] - except KeyError: - lg.critical( - ( - "Creating %s has been deferred, please call publish " - "create again" - ) - % publish_name - ) - return - source_args.extend(sources) - num_sources = len(sources) - else: # pragma: no cover - raise ValueError( - "Don't know how to handle publish config entry %s in %s" - % ( - conf, - publish_name, - ) - ) - assert has_source - assert len(components) == num_sources - - return Command(publish_cmd + options + source_args + endpoint_args) - - -def clone_snapshot(origin, destination): - """Creates a clone snapshot command with dependencies to be ordered and - executed later. - - :param origin: The snapshot to clone - :type origin: str - :param destination: The new name of the snapshot - :type destination: str""" - cmd = Command(["aptly", "snapshot", "merge", destination, origin]) - cmd.provide("snapshot", destination) - cmd.require("snapshot", origin) - return cmd - - -def publish_cmd_update(cfg, publish_name, publish_config, ignore_existing=False): - """Creates a publish command with its dependencies to be ordered and - executed later. - - :param cfg: pyaptly config - :type cfg: dict - :param publish_name: Name of the publish to update - :type publish_name: str - :param publish_config: Configuration of the publish from the yml file. 
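For orientation, the aptly invocation publish_cmd_create assembles for a typical snapshot-backed entry (names hypothetical; option order follows dict iteration, and the component count must match the number of sources or the final assert fires):

    publish_config = {
        "distribution": "trusty",
        "components": "main",
        "snapshots": ["trusty-snap"],
        "automatic-update": True,  # consumed by the caller, ignored here
    }
    # publish_cmd_create(cfg, "ubuntu", publish_config) builds roughly:
    #   aptly publish -distribution=trusty -component=main \
    #       snapshot trusty-snap ubuntu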
- :type publish_config: dict""" - - publish_cmd = ["aptly", "publish"] - options = [] - args = [publish_config["distribution"], publish_name] - - if "skip-contents" in publish_config and publish_config["skip-contents"]: - options.append("-skip-contents=true") - - if "repo" in publish_config: - publish_cmd.append("update") - return Command(publish_cmd + options + args) - - publish_fullname = "%s %s" % (publish_name, publish_config["distribution"]) - current_snapshots = state.publish_map[publish_fullname] - if "snapshots" in publish_config: - snapshots_config = publish_config["snapshots"] - new_snapshots = [snapshot_spec_to_name(cfg, snap) for snap in snapshots_config] - elif "publish" in publish_config: - conf_value = publish_config["publish"] - snapshots_config = [] - ref_publish_name, distribution = conf_value.split(" ") - for publish in cfg["publish"][ref_publish_name]: - if publish["distribution"] == distribution: - snapshots_config.extend(publish["snapshots"]) - break - new_snapshots = list(state.publish_map[conf_value]) - else: # pragma: no cover - raise ValueError( - "No snapshot references configured in publish %s" % publish_name - ) - - if set(new_snapshots) == set(current_snapshots) and not ignore_existing: - # Already pointing to the newest snapshot, nothing to do - return - components = unit_or_list_to_list(publish_config["components"]) - - for snap in snapshots_config: - # snap may be a plain name or a dict.. - if hasattr(snap, "items"): - # Dict mode - only here can we even have an archive option - archive = snap.get("archive-on-update", None) - - if archive: - # Replace any timestamp placeholder with the current - # date/time. Note that this is NOT rounded, as we want to - # know exactly when the archival happened. - archive = archive.replace( - "%T", format_timestamp(datetime.datetime.now()) - ) - if archive in state.snapshots: # pragma: no cover - continue - prefix_to_search = re.sub("%T$", "", snap["name"]) - - current_snapshot = [ - snap_name - for snap_name in sorted(current_snapshots, key=lambda x: -len(x)) - if snap_name.startswith(prefix_to_search) - ][0] - - clone_snapshot(current_snapshot, archive).execute() - - publish_cmd.append("switch") - options.append("-component=%s" % ",".join(components)) - - if "skip-contents" in publish_config and publish_config["skip-contents"]: - options.append("-skip-contents=true") - - return Command(publish_cmd + options + args + new_snapshots) - - -def repo_cmd_create(cfg, repo_name, repo_config): - """Create a repo create command to be ordered and executed later. - - :param cfg: pyaptly config - :type cfg: dict - :param repo_name: Name of the repo to create - :type repo_name: str - :param repo_config: Configuration of the repo from the yml file. 
- :type repo_config: dict""" - - if repo_name in state.repos: # pragma: no cover - # Nothing to do, repo already created - return - - repo_cmd = ["aptly", "repo"] - options = [] - endpoint_args = ["create", repo_name] - - for conf, conf_value in repo_config.items(): - if conf == "architectures": - options.append( - "-architectures=%s" % ",".join(unit_or_list_to_list(conf_value)) - ) - elif conf == "component": - components = unit_or_list_to_list(conf_value) - options.append("-component=%s" % ",".join(components)) - elif conf == "comment": # pragma: no cover - options.append("-comment=%s" % conf_value) - elif conf == "distribution": - options.append("-distribution=%s" % conf_value) - else: # pragma: no cover - raise ValueError( - "Don't know how to handle repo config entry %s in %s" - % ( - conf, - repo_name, - ) - ) - - return Command(repo_cmd + options + endpoint_args) - - -def repo(cfg, args): - """Creates repository commands, orders and executes them. - - :param cfg: The configuration yml as dict - :type cfg: dict - :param args: The command-line arguments read with :py:mod:`argparse` - :type args: namespace""" - lg.debug("Repositories to create: %s", cfg["repo"]) - - repo_cmds = { - "create": repo_cmd_create, - } - - cmd_repo = repo_cmds[args.task] - - if args.repo_name == "all": - commands = [ - cmd_repo(cfg, repo_name, repo_conf) - for repo_name, repo_conf in cfg["repo"].items() - ] - - for cmd in Command.order_commands(commands, state.has_dependency): - cmd.execute() - - else: - if args.repo_name in cfg["repo"]: - commands = [cmd_repo(cfg, args.repo_name, cfg["repo"][args.repo_name])] - for cmd in Command.order_commands(commands, state.has_dependency): - cmd.execute() - else: - raise ValueError( - "Requested publish is not defined in config file: %s" % (args.repo_name) - ) - - -def publish(cfg, args): - """Creates publish commands, orders and executes them. - - :param cfg: The configuration yml as dict - :type cfg: dict - :param args: The command-line arguments read with :py:mod:`argparse` - :type args: namespace""" - lg.debug("Publishes to create / update: %s", cfg["publish"]) - - # aptly publish snapshot -components ... -architectures ... -distribution - # ... -origin Ubuntu trusty-stable ubuntu/stable - - publish_cmds = { - "create": publish_cmd_create, - "update": publish_cmd_update, - } - - cmd_publish = publish_cmds[args.task] - - if args.publish_name == "all": - commands = [ - cmd_publish(cfg, publish_name, publish_conf_entry) - for publish_name, publish_conf in cfg["publish"].items() - for publish_conf_entry in publish_conf - if publish_conf_entry.get("automatic-update", "false") is True - ] - - for cmd in Command.order_commands(commands, state.has_dependency): - cmd.execute() - - else: - if args.publish_name in cfg["publish"]: - commands = [ - cmd_publish(cfg, args.publish_name, publish_conf_entry) - for publish_conf_entry in cfg["publish"][args.publish_name] - ] - for cmd in Command.order_commands(commands, state.has_dependency): - cmd.execute() - else: - raise ValueError( - "Requested publish is not defined in config file: %s" - % (args.publish_name) - ) - - -def snapshot(cfg, args): - """Creates snapshot commands, orders and executes them. 
- - :param cfg: The configuration yml as dict - :type cfg: dict - :param args: The command-line arguments read with :py:mod:`argparse` - :type args: namespace""" - lg.debug("Snapshots to create: %s", cfg["snapshot"].keys()) - - snapshot_cmds = { - "create": cmd_snapshot_create, - "update": cmd_snapshot_update, - } - - cmd_snapshot = snapshot_cmds[args.task] - - if args.snapshot_name == "all": - commands = [ - cmd - for snapshot_name, snapshot_config in cfg["snapshot"].items() - for cmd in cmd_snapshot(cfg, snapshot_name, snapshot_config) - ] - - if args.debug: # pragma: no cover - dot_file = "/tmp/commands.dot" - with codecs.open(dot_file, "w", "UTF-8") as fh_dot: - fh_dot.write(Command.command_list_to_digraph(commands)) - lg.info("Wrote command dependency tree graph to %s", dot_file) - - if len(commands) > 0: - for cmd in Command.order_commands(commands, state.has_dependency): - cmd.execute() - - else: - if args.snapshot_name in cfg["snapshot"]: - commands = cmd_snapshot( - cfg, args.snapshot_name, cfg["snapshot"][args.snapshot_name] - ) - - if len(commands) > 0: - for cmd in Command.order_commands(commands, state.has_dependency): - cmd.execute() - - else: - raise ValueError( - "Requested snapshot is not defined in config file: %s" - % (args.snapshot_name) - ) - - -def format_timestamp(timestamp): - """Wrapper for strftime, to ensure we're all using the same format. - - :param timestamp: The timestamp to format - :type timestamp: :py:class:`datetime.datetime`""" - return timestamp.strftime("%Y%m%dT%H%MZ") - - -back_reference_map = { - "current": 0, - "previous": 1, -} - - -def snapshot_spec_to_name(cfg, snapshot): - """Converts a given snapshot short spec to a name. - - A short spec is a value that may either be a string or a dict. - - If it's a string, everything is fine and we just use that as - a snapshot name. - - However if it's a dict, we assume it has the following keys: - - * name: template for the snapshot - * timestamp: information on how to generate the timestamp. - - For further information regarding the timestamp's data structure, - consult the documentation of expand_timestamped_name(). - - :param cfg: Complete yaml config - :type cfg: dict - :param snapshot: Config of the snapshot - :type snapshot: dict - """ - delta = datetime.timedelta(seconds=1) - if hasattr(snapshot, "items"): - name = snapshot["name"] - if "timestamp" not in snapshot: - return name - - ts = snapshot["timestamp"] - back_ref = back_reference_map.get(ts) - if back_ref is None: - back_ref = int(ts) - reference = cfg["snapshot"][name] - - timestamp = datetime.datetime.now() - for _ in range(back_ref + 1): - timestamp = round_timestamp(reference["timestamp"], timestamp) - timestamp -= delta - - timestamp += delta - return name.replace("%T", format_timestamp(timestamp)) - else: # pragma: no cover - return snapshot - - -def dependents_of_snapshot(snapshot_name): - """Yield a flat list of dependents from the current state. - - :rtype: generator""" - for dependent in state.snapshot_map.get(snapshot_name, []): - yield dependent - for sub in dependents_of_snapshot(dependent): # pragma: no cover - yield dependent - - -def rotate_snapshot(cfg, snapshot_name): - """Creates a command to rotate a snapshot in order to be able to update a - current publish. 
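The back-reference resolution in snapshot_spec_to_name deserves a worked example: "previous" (back_reference_map value 1) means round once for "current", then step one second back and round again. With a hypothetical daily template and "now" at 2015-10-07 15:30:

    cfg = {
        "snapshot": {
            "nightly-%T": {"mirror": "m1", "timestamp": {"time": "00:00"}}
        }
    }
    spec = {"name": "nightly-%T", "timestamp": "previous"}
    snapshot_spec_to_name(cfg, spec)  # -> 'nightly-20151006T0000Z'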
- - :param cfg: pyaptly config - :type cfg: dict - :param snapshot_name: the snapshot to rotate - :type snapshot_name: str""" - rotated_name = cfg["snapshot"][snapshot_name].get( - "rotate_via", - "%s-rotated-%s" % (snapshot_name, format_timestamp(datetime.datetime.now())), - ) - - # First, verify that our snapshot environment is in a sane state. - # Fixing the environment is not currently our task. - - if rotated_name in state.snapshots: # pragma: no cover - raise Exception( - "Cannot update snapshot %s - rotated name %s already exists" - % (snapshot_name, rotated_name) - ) - - cmd = Command(["aptly", "snapshot", "rename", snapshot_name, rotated_name]) - - cmd.provide("virtual", rotated_name) - return cmd - - -def cmd_snapshot_update(cfg, snapshot_name, snapshot_config): - """Create commands to update all rotating snapshots. - - :param cfg: pyaptly config - :type cfg: dict - :param snapshot_name: Name of the snapshot to update/rotate - :type snapshot_name: str - :param snapshot_config: Configuration of the snapshot from the yml file. - :type snapshot_config: dict""" - - # To update a snapshot, we need to do roughly the following steps: - # 1) Rename the current snapshot and all snapshots that depend on it - # 2) Create new version of the snapshot and all snapshots that depend on it - # 3) Recreate all renamed snapshots - # 4) Update / switch-over publishes - # 5) Remove the rotated temporary snapshots - - if "%T" in snapshot_name: # pragma: no cover - # Timestamped snapshots are never rotated by design. - return [] - - affected_snapshots = [snapshot_name] - affected_snapshots.extend(list(dependents_of_snapshot(snapshot_name))) - - # TODO: rotated snapshots should be identified by configuration option, not - # just by "not being timestamped - - rename_cmds = [rotate_snapshot(cfg, snap) for snap in affected_snapshots] - - # The "intermediate" command causes the state reader to refresh. At the - # same time, it provides a collection point for dependency handling. - intermediate = FunctionCommand(state.read) - intermediate.provide("virtual", "all-snapshots-rotated") - - for cmd in rename_cmds: - # Ensure that our "intermediate" pseudo command comes after all - # the rename commands, by ensuring it depends on all their "virtual" - # provided items. - cmd_vprovides = [ - provide for ptype, provide in cmd.get_provides() if ptype == "virtual" - ] - for provide in cmd_vprovides: - intermediate.require("virtual", provide) - - # Same as before - create a focal point to "collect" dependencies - # after the snapshots have been rebuilt. Also reload state once again - intermediate2 = FunctionCommand(state.read) - intermediate2.provide("virtual", "all-snapshots-rebuilt") - - create_cmds = [] - for _ in affected_snapshots: - # Well.. there's normally just one, but since we need interface - # consistency, cmd_snapshot_create() returns a list. And since it - # returns a list, we may just as well future-proof it and loop instead - # of assuming it's going to be a single entry (and fail horribly if - # this assumption changes in the future). 
- for create_cmd in cmd_snapshot_create( - cfg, snapshot_name, cfg["snapshot"][snapshot_name], ignore_existing=True - ): - # enforce cmd to run after the refresh, and thus also - # after all the renames - create_cmd.require("virtual", "all-snapshots-rotated") - - # Evil hack - we must do the dependencies ourselves, to avoid - # getting a circular graph - create_cmd._requires = set( - [ - (type_, req) - for type_, req in create_cmd._requires - if type_ != "snapshot" - ] - ) - - create_cmd.provide("virtual", "readyness-for-%s" % snapshot_name) - for follower in dependents_of_snapshot(snapshot_name): - create_cmd.require("virtual", "readyness-for-%s" % follower) - - # "Focal point" - make intermediate2 run after all the commands - # that re-create the snapshots - create_cmd.provide("virtual", "rebuilt-%s" % snapshot_name) - intermediate2.require("virtual", "rebuilt-%s" % snapshot_name) - - create_cmds.append(create_cmd) - - # At this point, snapshots have been renamed, then recreated. - # After each of the steps, the system state has been re-read. - # So now, we're left with updating the publishes. - - def is_publish_affected(name, publish): - if "%s %s" % (name, publish["distribution"]) in state.publishes: - try: - for snap in publish["snapshots"]: - snap_name = snapshot_spec_to_name(cfg, snap) - if snap_name in affected_snapshots: - return True - except KeyError: # pragma: no cover - lg.debug( - ( - "Publish endpoint %s is not affected because it has no " - "snapshots defined" - ) - % name - ) - return False - return False - - if "publish" in cfg: - all_publish_commands = [ - publish_cmd_update( - cfg, publish_name, publish_conf_entry, ignore_existing=True - ) - for publish_name, publish_conf in cfg["publish"].items() - for publish_conf_entry in publish_conf - if publish_conf_entry.get("automatic-update", "false") is True - if is_publish_affected(publish_name, publish_conf_entry) - ] - else: - all_publish_commands = [] - - republish_cmds = [c for c in all_publish_commands if c] - - # Ensure that the republish commands run AFTER the snapshots are rebuilt - for cmd in republish_cmds: - cmd.require("virtual", "all-snapshots-rebuilt") - - # TODO: - # - We need to cleanup all the rotated snapshots after the publishes are - # rebuilt - # - Filter publishes, so only the non-timestamped publishes are rebuilt - - return rename_cmds + create_cmds + republish_cmds + [intermediate, intermediate2] - - -def cmd_snapshot_create(cfg, snapshot_name, snapshot_config, ignore_existing=False): - """Create a snapshot create command to be ordered and executed later. - - :param cfg: pyaptly config - :type cfg: dict - :param snapshot_name: Name of the snapshot to create - :type snapshot_name: str - :param snapshot_config: Configuration of the snapshot from the yml file. - :type snapshot_config: dict - :param ignore_existing: Optional, defaults to False. 
If set to True, still - return a command object even if the requested - snapshot already exists - :type ignore_existing: dict - - :rtype: Command - """ - - # TODO: extract possible timestamp component - # and generate *actual* snapshot name - - snapshot_name = expand_timestamped_name(snapshot_name, snapshot_config) - - if snapshot_name in state.snapshots and not ignore_existing: - return [] - - default_aptly_cmd = ["aptly", "snapshot", "create"] - default_aptly_cmd.append(snapshot_name) - default_aptly_cmd.append("from") - - if "mirror" in snapshot_config: - cmd = Command(default_aptly_cmd + ["mirror", snapshot_config["mirror"]]) - cmd.provide("snapshot", snapshot_name) - cmd.require("mirror", snapshot_config["mirror"]) - return [cmd] - - elif "repo" in snapshot_config: - cmd = Command(default_aptly_cmd + ["repo", snapshot_config["repo"]]) - cmd.provide("snapshot", snapshot_name) - cmd.require("repo", snapshot_config["repo"]) - return [cmd] - - elif "filter" in snapshot_config: - cmd = Command( - [ - "aptly", - "snapshot", - "filter", - snapshot_spec_to_name(cfg, snapshot_config["filter"]["source"]), - snapshot_name, - snapshot_config["filter"]["query"], - ] - ) - cmd.provide("snapshot", snapshot_name) - cmd.require( - "snapshot", snapshot_spec_to_name(cfg, snapshot_config["filter"]["source"]) - ) - return [cmd] - - elif "merge" in snapshot_config: - cmd = Command( - [ - "aptly", - "snapshot", - "merge", - snapshot_name, - ] - ) - cmd.provide("snapshot", snapshot_name) - - for source in snapshot_config["merge"]: - source_name = snapshot_spec_to_name(cfg, source) - cmd.append(source_name) - cmd.require("snapshot", source_name) - - return [cmd] - - else: # pragma: no cover - raise ValueError("Don't know how to handle snapshot config" % (snapshot_config)) - - -def mirror(cfg, args): - """Creates mirror commands, orders and executes them. - - :param cfg: The configuration yml as dict - :type cfg: dict - :param args: The command-line arguments read with :py:mod:`argparse` - :type args: namespace""" - lg.debug("Mirrors to create: %s", cfg["mirror"]) - - mirror_cmds = { - "create": cmd_mirror_create, - "update": cmd_mirror_update, - } - - cmd_mirror = mirror_cmds[args.task] - - if args.mirror_name == "all": - for mirror_name, mirror_config in cfg["mirror"].items(): - cmd_mirror(cfg, mirror_name, mirror_config) - else: - if args.mirror_name in cfg["mirror"]: - cmd_mirror(cfg, args.mirror_name, cfg["mirror"][args.mirror_name]) - else: - raise ValueError( - "Requested mirror is not defined in config file: %s" - % (args.mirror_name) - ) - - -def add_gpg_keys(mirror_config): - """Uses the gpg command-line to download and add gpg keys needed to create - mirrors. 
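The filter and merge branches of cmd_snapshot_create above turn declarative stanzas into plain aptly calls, e.g. (names and query hypothetical):

    cfg_snapshots = {
        "filtered": {"filter": {"source": "base", "query": "mypkg (>= 1.0)"}},
        "combined": {"merge": ["base", "extra"]},
    }
    # "filtered" -> aptly snapshot filter base filtered 'mypkg (>= 1.0)'
    # "combined" -> aptly snapshot merge combined base extra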
- - :param mirror_config: The configuration yml as dict - :type mirror_config: dict - """ - keyserver = mirror_config.get("keyserver") - if not keyserver: - keyserver = get_default_keyserver() - keys_urls = {} - if "gpg-keys" in mirror_config: - keys = unit_or_list_to_list(mirror_config["gpg-keys"]) - if "gpg-urls" in mirror_config: - urls = unit_or_list_to_list(mirror_config["gpg-urls"]) - urls_len = len(urls) - for x in range(len(keys)): - if x < urls_len: - url = urls[x] - else: # pragma: no cover - url = None - keys_urls[keys[x]] = url - else: - for key in keys: - keys_urls[key] = None - - for key in keys_urls.keys(): - if key in state.gpg_keys: - continue - try: - key_command = [ - "gpg", - "--no-default-keyring", - "--keyring", - "trustedkeys.gpg", - "--keyserver", - keyserver, - "--recv-keys", - key, - ] - lg.debug("Adding gpg key with call: %s", key_command) - subprocess.check_call(key_command) - except subprocess.CalledProcessError: # pragma: no cover - url = keys_urls[key] - if url: - key_command = ( - "curl %s | " - "gpg --no-default-keyring --keyring trustedkeys.gpg " - "--import" - ) % url - subprocess.check_call(["bash", "-c", key_command]) - else: - raise - state.read_gpg() - - -def cmd_mirror_create(cfg, mirror_name, mirror_config): - """Create a mirror create command to be ordered and executed later. - - :param cfg: The configuration yml as dict - :type cfg: dict - :param mirror_name: Name of the mirror to create - :type mirror_name: str - :param mirror_config: Configuration of the snapshot from the yml file. - :type mirror_config: dict""" - - if mirror_name in state.mirrors: # pragma: no cover - return - - add_gpg_keys(mirror_config) - aptly_cmd = ["aptly", "mirror", "create"] - - if "sources" in mirror_config and mirror_config["sources"]: - aptly_cmd.append("-with-sources") - else: - aptly_cmd.append("-with-sources=false") - - if "udeb" in mirror_config and mirror_config["udeb"]: - aptly_cmd.append("-with-udebs") - - if "architectures" in mirror_config: - aptly_cmd.append( - "-architectures={0}".format( - ",".join(unit_or_list_to_list(mirror_config["architectures"])) - ) - ) - - aptly_cmd.append(mirror_name) - aptly_cmd.append(mirror_config["archive"]) - aptly_cmd.append(mirror_config["distribution"]) - aptly_cmd.extend(unit_or_list_to_list(mirror_config["components"])) - - lg.debug("Running command: %s", " ".join(aptly_cmd)) - subprocess.check_call(aptly_cmd) - - -def cmd_mirror_update(cfg, mirror_name, mirror_config): - """Create a mirror update command to be ordered and executed later. - - :param cfg: pyaptly config - :type cfg: dict - :param mirror_name: Name of the mirror to create - :type mirror_name: str - :param mirror_config: Configuration of the snapshot from the yml file. - :type mirror_config: dict""" - if mirror_name not in state.mirrors: # pragma: no cover - raise Exception("Mirror not created yet") - add_gpg_keys(mirror_config) - aptly_cmd = ["aptly", "mirror", "update"] - if "max-tries" in mirror_config: - aptly_cmd.append("-max-tries=%d" % mirror_config["max-tries"]) - - aptly_cmd.append(mirror_name) - lg.debug("Running command: %s", " ".join(aptly_cmd)) - subprocess.check_call(aptly_cmd) - - -if __name__ == "__main__": # pragma: no cover - main() diff --git a/pyaptly/main.py b/pyaptly/main.py new file mode 100755 index 0000000..b6b8ed4 --- /dev/null +++ b/pyaptly/main.py @@ -0,0 +1,119 @@ +"""Aptly mirror/snapshot management automation.""" +import argparse +import codecs +import logging +import subprocess +import sys + +import yaml + +from . 
import command, mirror, publish, repo, snapshot, state_reader + +_logging_setup = False + + +lg = logging.getLogger(__name__) + + +# TODO remove +def call_output(args, input_=None): + """Call command and return output. + + :param args: Command to execute + :type args: list + :param input_: Input to command + :type input_: bytes + """ + p = subprocess.Popen( + args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + output, err = p.communicate(input_) + if p.returncode != 0: + raise subprocess.CalledProcessError( + p.returncode, + args, + output, + err, + ) + return (output.decode("UTF-8"), err.decode("UTF-8")) + + +def main(argv=None): + """Called by command-line, defines parsers and executes commands. + + :param argv: Arguments usually taken from sys.argv + :type argv: list""" + global _logging_setup + if not argv: # pragma: no cover + argv = sys.argv[1:] + parser = argparse.ArgumentParser(description="Manage aptly") + parser.add_argument( + "--config", + "-c", + help="Yaml config file defining mirrors and snapshots", + type=str, + required=True, + ) + parser.add_argument( + "--debug", + "-d", + help="Enable debug output", + action="store_true", + ) + parser.add_argument( + "--pretend", + "-p", + help="Do not do anything, just print out what WOULD be done", + action="store_true", + ) + subparsers = parser.add_subparsers() + mirror_parser = subparsers.add_parser("mirror", help="manage aptly mirrors") + mirror_parser.set_defaults(func=mirror.mirror) + mirror_parser.add_argument("task", type=str, choices=["create", "update"]) + mirror_parser.add_argument("mirror_name", type=str, nargs="?", default="all") + snap_parser = subparsers.add_parser("snapshot", help="manage aptly snapshots") + snap_parser.set_defaults(func=snapshot.snapshot) + snap_parser.add_argument("task", type=str, choices=["create", "update"]) + snap_parser.add_argument("snapshot_name", type=str, nargs="?", default="all") + publish_parser = subparsers.add_parser( + "publish", help="manage aptly publish endpoints" + ) + publish_parser.set_defaults(func=publish.publish) + publish_parser.add_argument("task", type=str, choices=["create", "update"]) + publish_parser.add_argument("publish_name", type=str, nargs="?", default="all") + repo_parser = subparsers.add_parser("repo", help="manage aptly repositories") + repo_parser.set_defaults(func=repo.repo) + repo_parser.add_argument("task", type=str, choices=["create"]) + repo_parser.add_argument("repo_name", type=str, nargs="?", default="all") + + args = parser.parse_args(argv) + root = logging.getLogger() + formatter = logging.Formatter( + "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + ) + if not _logging_setup: # noqa + handler = logging.StreamHandler(sys.stderr) + handler.setFormatter(formatter) + root.addHandler(handler) + handler.setLevel(logging.CRITICAL) + if args.debug: + root.setLevel(logging.DEBUG) + handler.setLevel(logging.DEBUG) + if args.pretend: + command.Command.pretend_mode = True + else: + command.Command.pretend_mode = False + + _logging_setup = True # noqa + lg.debug("Args: %s", vars(args)) + + with codecs.open(args.config, "r", encoding="UTF-8") as cfgfile: + cfg = yaml.load(cfgfile, Loader=yaml.FullLoader) + state_reader.state.read() + + # run function for selected subparser + args.func(cfg, args) + + +if __name__ == "__main__": # pragma: no cover + main() diff --git a/pyaptly/mirror.py b/pyaptly/mirror.py new file mode 100644 index 0000000..c676066 --- /dev/null +++ b/pyaptly/mirror.py @@ -0,0 +1,152 @@ +import logging +import 
subprocess + +from . import state_reader, util + +lg = logging.getLogger(__name__) + + +def add_gpg_keys(mirror_config): + """Uses the gpg command-line to download and add gpg keys needed to create + mirrors. + + :param mirror_config: The configuration yml as dict + :type mirror_config: dict + """ + keyserver = mirror_config.get("keyserver") + if not keyserver: + keyserver = util.get_default_keyserver() + keys_urls = {} + if "gpg-keys" in mirror_config: + keys = util.unit_or_list_to_list(mirror_config["gpg-keys"]) + if "gpg-urls" in mirror_config: + urls = util.unit_or_list_to_list(mirror_config["gpg-urls"]) + urls_len = len(urls) + for x in range(len(keys)): + if x < urls_len: + url = urls[x] + else: # pragma: no cover + url = None + keys_urls[keys[x]] = url + else: + for key in keys: + keys_urls[key] = None + + for key in keys_urls.keys(): + if key in state_reader.state.gpg_keys: + continue + try: + key_command = [ + "gpg", + "--no-default-keyring", + "--keyring", + "trustedkeys.gpg", + "--keyserver", + keyserver, + "--recv-keys", + key, + ] + lg.debug("Adding gpg key with call: %s", key_command) + subprocess.check_call(key_command) + except subprocess.CalledProcessError: # pragma: no cover + url = keys_urls[key] + if url: + key_shell = ( + "curl %s | " + "gpg --no-default-keyring --keyring trustedkeys.gpg " + "--import" + ) % url + subprocess.check_call(["bash", "-c", key_shell]) + else: + raise + state_reader.state.read_gpg() + + +def mirror(cfg, args): + """Creates and executes mirror commands. + + :param cfg: The configuration yml as dict + :type cfg: dict + :param args: The command-line arguments read with :py:mod:`argparse` + :type args: namespace""" + lg.debug("Mirrors to create: %s", cfg["mirror"]) + + mirror_cmds = { + "create": cmd_mirror_create, + "update": cmd_mirror_update, + } + + cmd_mirror = mirror_cmds[args.task] + + if args.mirror_name == "all": + for mirror_name, mirror_config in cfg["mirror"].items(): + cmd_mirror(cfg, mirror_name, mirror_config) + else: + if args.mirror_name in cfg["mirror"]: + cmd_mirror(cfg, args.mirror_name, cfg["mirror"][args.mirror_name]) + else: + raise ValueError( + "Requested mirror is not defined in config file: %s" + % (args.mirror_name) + ) + + +def cmd_mirror_create(cfg, mirror_name, mirror_config): + """Create an aptly mirror; the command runs immediately. + + :param cfg: The configuration yml as dict + :type cfg: dict + :param mirror_name: Name of the mirror to create + :type mirror_name: str + :param mirror_config: Configuration of the mirror from the yml file. 
+ :type mirror_config: dict""" + + if mirror_name in state_reader.state.mirrors: # pragma: no cover + return + + add_gpg_keys(mirror_config) + aptly_cmd = ["aptly", "mirror", "create"] + + if "sources" in mirror_config and mirror_config["sources"]: + aptly_cmd.append("-with-sources") + else: + aptly_cmd.append("-with-sources=false") + + if "udeb" in mirror_config and mirror_config["udeb"]: + aptly_cmd.append("-with-udebs") + + if "architectures" in mirror_config: + aptly_cmd.append( + "-architectures={0}".format( + ",".join(util.unit_or_list_to_list(mirror_config["architectures"])) + ) + ) + + aptly_cmd.append(mirror_name) + aptly_cmd.append(mirror_config["archive"]) + aptly_cmd.append(mirror_config["distribution"]) + aptly_cmd.extend(util.unit_or_list_to_list(mirror_config["components"])) + + lg.debug("Running command: %s", " ".join(aptly_cmd)) + subprocess.check_call(aptly_cmd) + + +def cmd_mirror_update(cfg, mirror_name, mirror_config): + """Update an existing aptly mirror; the command runs immediately. + + :param cfg: pyaptly config + :type cfg: dict + :param mirror_name: Name of the mirror to update + :type mirror_name: str + :param mirror_config: Configuration of the mirror from the yml file. + :type mirror_config: dict""" + if mirror_name not in state_reader.state.mirrors: # pragma: no cover + raise Exception("Mirror not created yet") + add_gpg_keys(mirror_config) + aptly_cmd = ["aptly", "mirror", "update"] + if "max-tries" in mirror_config: + aptly_cmd.append("-max-tries=%d" % mirror_config["max-tries"]) + + aptly_cmd.append(mirror_name) + lg.debug("Running command: %s", " ".join(aptly_cmd)) + subprocess.check_call(aptly_cmd) diff --git a/pyaptly/publish.py b/pyaptly/publish.py new file mode 100644 index 0000000..b740cda --- /dev/null +++ b/pyaptly/publish.py @@ -0,0 +1,248 @@ +import datetime +import logging +import re + +from . import command, date_tools, snapshot, state_reader, util + +lg = logging.getLogger(__name__) + + +def publish(cfg, args): + """Creates publish commands, orders and executes them. + + :param cfg: The configuration yml as dict + :type cfg: dict + :param args: The command-line arguments read with :py:mod:`argparse` + :type args: namespace""" + lg.debug("Publishes to create / update: %s", cfg["publish"]) + + # aptly publish snapshot -components ... -architectures ... -distribution + # ... -origin Ubuntu trusty-stable ubuntu/stable + + publish_cmds = { + "create": publish_cmd_create, + "update": publish_cmd_update, + } + + cmd_publish = publish_cmds[args.task] + + if args.publish_name == "all": + commands = [ + cmd_publish(cfg, publish_name, publish_conf_entry) + for publish_name, publish_conf in cfg["publish"].items() + for publish_conf_entry in publish_conf + if publish_conf_entry.get("automatic-update", "false") is True + ] + + for cmd in command.Command.order_commands( + commands, state_reader.state.has_dependency + ): + cmd.execute() + + else: + if args.publish_name in cfg["publish"]: + commands = [ + cmd_publish(cfg, args.publish_name, publish_conf_entry) + for publish_conf_entry in cfg["publish"][args.publish_name] + ] + for cmd in command.Command.order_commands( + commands, state_reader.state.has_dependency + ): + cmd.execute() + else: + raise ValueError( + "Requested publish is not defined in config file: %s" + % (args.publish_name) + ) + + +def publish_cmd_update(cfg, publish_name, publish_config, ignore_existing=False): + """Creates a publish command with its dependencies to be ordered and + executed later. 
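A mirror stanza consuming the options handled above, written as the dict the YAML config parses into (URL and names hypothetical):

    mirror_config = {
        "archive": "http://archive.example.com/ubuntu/",
        "distribution": "trusty",
        "components": ["main", "universe"],
        "architectures": ["amd64"],
        "udeb": False,
    }
    # cmd_mirror_create(cfg, "trusty-main", mirror_config) runs roughly:
    #   aptly mirror create -with-sources=false -architectures=amd64 \
    #       trusty-main http://archive.example.com/ubuntu/ trusty main universe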
+ + :param cfg: pyaptly config + :type cfg: dict + :param publish_name: Name of the publish to update + :type publish_name: str + :param publish_config: Configuration of the publish from the yml file. + :type publish_config: dict""" + + publish_cmd = ["aptly", "publish"] + options = [] + args = [publish_config["distribution"], publish_name] + + if "skip-contents" in publish_config and publish_config["skip-contents"]: + options.append("-skip-contents=true") + + if "repo" in publish_config: + publish_cmd.append("update") + return command.Command(publish_cmd + options + args) + + publish_fullname = "%s %s" % (publish_name, publish_config["distribution"]) + current_snapshots = state_reader.state.publish_map[publish_fullname] + if "snapshots" in publish_config: + snapshots_config = publish_config["snapshots"] + new_snapshots = [ + snapshot.snapshot_spec_to_name(cfg, snap) for snap in snapshots_config + ] + elif "publish" in publish_config: + conf_value = publish_config["publish"] + snapshots_config = [] + ref_publish_name, distribution = conf_value.split(" ") + for publish in cfg["publish"][ref_publish_name]: + if publish["distribution"] == distribution: + snapshots_config.extend(publish["snapshots"]) + break + new_snapshots = list(state_reader.state.publish_map[conf_value]) + else: # pragma: no cover + raise ValueError( + "No snapshot references configured in publish %s" % publish_name + ) + + if set(new_snapshots) == set(current_snapshots) and not ignore_existing: + # Already pointing to the newest snapshot, nothing to do + return + components = util.unit_or_list_to_list(publish_config["components"]) + + for snap in snapshots_config: + # snap may be a plain name or a dict.. + if hasattr(snap, "items"): + # Dict mode - only here can we even have an archive option + archive = snap.get("archive-on-update", None) + + if archive: + # Replace any timestamp placeholder with the current + # date/time. Note that this is NOT rounded, as we want to + # know exactly when the archival happened. + archive = archive.replace( + "%T", date_tools.format_timestamp(datetime.datetime.now()) + ) + if archive in state_reader.state.snapshots: # pragma: no cover + continue + prefix_to_search = re.sub("%T$", "", snap["name"]) + + current_snapshot = [ + snap_name + for snap_name in sorted(current_snapshots, key=lambda x: -len(x)) + if snap_name.startswith(prefix_to_search) + ][0] + + snapshot.clone_snapshot(current_snapshot, archive).execute() + + publish_cmd.append("switch") + options.append("-component=%s" % ",".join(components)) + + if "skip-contents" in publish_config and publish_config["skip-contents"]: + options.append("-skip-contents=true") + + return command.Command(publish_cmd + options + args + new_snapshots) + + +def publish_cmd_create(cfg, publish_name, publish_config, ignore_existing=False): + """Creates a publish command with its dependencies to be ordered and + executed later. + + :param cfg: pyaptly config + :type cfg: dict + :param publish_name: Name of the publish to create + :type publish_name: str + :param publish_config: Configuration of the publish from the yml file. 
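One detail of publish_cmd_update worth highlighting: a dict-style snapshot entry may carry "archive-on-update", in which case the outgoing snapshot is cloned under that name before the publish is switched, with %T expanded to the exact (unrounded) current time. Hypothetical entry and resulting call:

    snap_entry = {"name": "base-%T", "archive-on-update": "base-archived-%T"}
    # With the clock at 2015-10-07 15:31 this executes immediately:
    #   aptly snapshot merge base-archived-20151007T1531Z <current snapshot>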
+ :type publish_config: dict""" + publish_fullname = "%s %s" % (publish_name, publish_config["distribution"]) + if publish_fullname in state_reader.state.publishes and not ignore_existing: + # Nothing to do, publish already created + return + + publish_cmd = ["aptly", "publish"] + options = [] + source_args = [] + endpoint_args = [publish_name] + + has_source = False + num_sources = 0 + + for conf, conf_value in publish_config.items(): + if conf == "skip-contents": + if conf_value: + options.append("-skip-contents=true") + elif conf == "architectures": # pragma: no cover + options.append( + "-architectures=%s" % ",".join(util.unit_or_list_to_list(conf_value)) + ) + elif conf == "components": + components = util.unit_or_list_to_list(conf_value) + options.append("-component=%s" % ",".join(components)) + elif conf == "label": # pragma: no cover + options.append("-label=%s" % conf_value) + elif conf == "origin": # pragma: no cover + options.append("-origin=%s" % conf_value) + + elif conf == "distribution": + options.append("-distribution=%s" % conf_value) + + elif conf == "gpg-key": + options.append("-gpg-key=%s" % conf_value) + elif conf == "automatic-update": + # Ignored here + pass + elif conf == "snapshots": + if has_source: # pragma: no cover + raise ValueError( + "Multiple sources for publish %s %s" + % (publish_name, publish_config) + ) + has_source = True + snapshots = util.unit_or_list_to_list(conf_value) + source_args.append("snapshot") + source_args.extend( + [ + snapshot.snapshot_spec_to_name(cfg, conf_value) + for conf_value in snapshots + ] + ) + + num_sources = len(snapshots) + + elif conf == "repo": + if has_source: # pragma: no cover + raise ValueError( + "Multiple sources for publish %s %s" + % (publish_name, publish_config) + ) + has_source = True + source_args = ["repo", conf_value] + num_sources = 1 + elif conf == "publish": + if has_source: # pragma: no cover + raise ValueError( + "Multiple sources for publish %s %s" + % (publish_name, publish_config) + ) + has_source = True + conf_value = " ".join(conf_value.split("/")) + source_args.append("snapshot") + try: + sources = state_reader.state.publish_map[conf_value] + except KeyError: + lg.critical( + ( + "Creating %s has been deferred, please call publish " + "create again" + ) + % publish_name + ) + return + source_args.extend(sources) + num_sources = len(sources) + else: # pragma: no cover + raise ValueError( + "Don't know how to handle publish config entry %s in %s" + % ( + conf, + publish_name, + ) + ) + assert has_source + assert len(components) == num_sources + + return command.Command(publish_cmd + options + source_args + endpoint_args) diff --git a/pyaptly/repo.py b/pyaptly/repo.py new file mode 100644 index 0000000..d1c44ea --- /dev/null +++ b/pyaptly/repo.py @@ -0,0 +1,86 @@ +import logging + +from . import command, state_reader, util + +lg = logging.getLogger(__name__) + + +def repo(cfg, args): + """Creates repository commands, orders and executes them. 
+
+    :param cfg: The configuration yml as dict
+    :type cfg: dict
+    :param args: The command-line arguments read with :py:mod:`argparse`
+    :type args: namespace"""
+    lg.debug("Repositories to create: %s", cfg["repo"])
+
+    repo_cmds = {
+        "create": repo_cmd_create,
+    }
+
+    cmd_repo = repo_cmds[args.task]
+
+    if args.repo_name == "all":
+        commands = [
+            cmd_repo(cfg, repo_name, repo_conf)
+            for repo_name, repo_conf in cfg["repo"].items()
+        ]
+
+        for cmd in command.Command.order_commands(
+            commands, state_reader.state.has_dependency
+        ):
+            cmd.execute()
+
+    else:
+        if args.repo_name in cfg["repo"]:
+            commands = [cmd_repo(cfg, args.repo_name, cfg["repo"][args.repo_name])]
+            for cmd in command.Command.order_commands(
+                commands, state_reader.state.has_dependency
+            ):
+                cmd.execute()
+        else:
+            raise ValueError(
+                "Requested repo is not defined in config file: %s" % (args.repo_name)
+            )
+
+
+def repo_cmd_create(cfg, repo_name, repo_config):
+    """Create a repo create command to be ordered and executed later.
+
+    :param cfg: pyaptly config
+    :type cfg: dict
+    :param repo_name: Name of the repo to create
+    :type repo_name: str
+    :param repo_config: Configuration of the repo from the yml file.
+    :type repo_config: dict"""
+
+    if repo_name in state_reader.state.repos:  # pragma: no cover
+        # Nothing to do, repo already created
+        return
+
+    repo_cmd = ["aptly", "repo"]
+    options = []
+    endpoint_args = ["create", repo_name]
+
+    for conf, conf_value in repo_config.items():
+        if conf == "architectures":
+            options.append(
+                "-architectures=%s" % ",".join(util.unit_or_list_to_list(conf_value))
+            )
+        elif conf == "component":
+            components = util.unit_or_list_to_list(conf_value)
+            options.append("-component=%s" % ",".join(components))
+        elif conf == "comment":  # pragma: no cover
+            options.append("-comment=%s" % conf_value)
+        elif conf == "distribution":
+            options.append("-distribution=%s" % conf_value)
+        else:  # pragma: no cover
+            raise ValueError(
+                "Don't know how to handle repo config entry %s in %s"
+                % (
+                    conf,
+                    repo_name,
+                )
+            )
+
+    return command.Command(repo_cmd + options + endpoint_args)
diff --git a/pyaptly/snapshot.py b/pyaptly/snapshot.py
new file mode 100644
index 0000000..1ffb4b7
--- /dev/null
+++ b/pyaptly/snapshot.py
@@ -0,0 +1,386 @@
+import datetime
+import logging
+from typing import Optional
+
+from . import command, date_tools, publish, state_reader, types
+
+lg = logging.getLogger(__name__)
+
+back_reference_map = {
+    "current": 0,
+    "previous": 1,
+}
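+
+
+# Illustration: in a timestamped snapshot spec, `timestamp = "current"`
+# resolves %T to the most recent rounded timestamp of the referenced
+# snapshot and `timestamp = "previous"` to the one before that; a plain
+# integer reaches correspondingly further back (see snapshot_spec_to_name()
+# below).
+
+
+def snapshot(cfg, args):
+    """Creates snapshot commands, orders and executes them.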
+
+    :param cfg: The configuration yml as dict
+    :type cfg: dict
+    :param args: The command-line arguments read with :py:mod:`argparse`
+    :type args: namespace"""
+    lg.debug("Snapshots to create: %s", cfg["snapshot"].keys())
+
+    cmd_snapshot: types.SnapshotCommand = cmd_snapshot_update
+    if args.task == "create":
+        cmd_snapshot = cmd_snapshot_create
+
+    if args.snapshot_name == "all":
+        commands = [
+            cmd
+            for snapshot_name, snapshot_config in cfg["snapshot"].items()
+            for cmd in cmd_snapshot(cfg, snapshot_name, snapshot_config)
+        ]
+
+        if args.debug:  # pragma: no cover
+            dot_file = "/tmp/commands.dot"
+            with open(dot_file, "w", encoding="UTF-8") as fh_dot:
+                fh_dot.write(command.Command.command_list_to_digraph(commands))
+            lg.info("Wrote command dependency tree graph to %s", dot_file)
+
+        if len(commands) > 0:
+            for cmd in command.Command.order_commands(
+                commands, state_reader.state.has_dependency
+            ):
+                cmd.execute()
+
+    else:
+        if args.snapshot_name in cfg["snapshot"]:
+            commands = cmd_snapshot(
+                cfg, args.snapshot_name, cfg["snapshot"][args.snapshot_name]
+            )
+
+            if len(commands) > 0:
+                for cmd in command.Command.order_commands(
+                    commands, state_reader.state.has_dependency
+                ):
+                    cmd.execute()
+
+        else:
+            raise ValueError(
+                "Requested snapshot is not defined in config file: %s"
+                % (args.snapshot_name)
+            )
+
+
+def snapshot_spec_to_name(cfg, snapshot):
+    """Converts a given snapshot short spec to a name.
+
+    A short spec is a value that may either be a string or a dict.
+
+    If it's a string, everything is fine and we just use that as
+    a snapshot name.
+
+    However if it's a dict, we assume it has the following keys:
+
+    * name: template for the snapshot
+    * timestamp: information on how to generate the timestamp.
+
+    For further information regarding the timestamp's data structure,
+    consult the documentation of expand_timestamped_name().
+
+    :param cfg: Complete yaml config
+    :type cfg: dict
+    :param snapshot: Config of the snapshot
+    :type snapshot: dict
+    """
+    delta = datetime.timedelta(seconds=1)
+    if hasattr(snapshot, "items"):
+        name = snapshot["name"]
+        if "timestamp" not in snapshot:
+            return name
+
+        ts = snapshot["timestamp"]
+        back_ref = back_reference_map.get(ts)
+        if back_ref is None:
+            back_ref = int(ts)
+        reference = cfg["snapshot"][name]
+
+        timestamp = datetime.datetime.now()
+        for _ in range(back_ref + 1):
+            timestamp = date_tools.round_timestamp(reference["timestamp"], timestamp)
+            timestamp -= delta
+
+        timestamp += delta
+        return name.replace("%T", date_tools.format_timestamp(timestamp))
+    else:  # pragma: no cover
+        return snapshot
+
+
+def dependents_of_snapshot(snapshot_name):
+    """Yield all dependents of a snapshot, recursively, from the current
+    system state.
+
+    :rtype: generator"""
+    for dependent in state_reader.state.snapshot_map.get(snapshot_name, []):
+        yield dependent
+        for sub in dependents_of_snapshot(dependent):  # pragma: no cover
+            yield sub
+
+
+def rotate_snapshot(cfg, snapshot_name):
+    """Creates a command to rotate a snapshot in order to be able to update a
+    current publish.
+
+    :param cfg: pyaptly config
+    :type cfg: dict
+    :param snapshot_name: the snapshot to rotate
+    :type snapshot_name: str"""
+    rotated_name = cfg["snapshot"][snapshot_name].get(
+        "rotate_via",
+        "%s-rotated-%s"
+        % (snapshot_name, date_tools.format_timestamp(datetime.datetime.now())),
+    )
+
+    # First, verify that our snapshot environment is in a sane state.
+    # Fixing the environment is not currently our task.
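+    # (Illustrative) Without rotate_via, a snapshot named "fake-current"
+    # rotated at 2012-10-10 10:10:10 would be renamed to something like
+    # "fake-current-rotated-20121010T1010Z" before being recreated.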
+
+    if rotated_name in state_reader.state.snapshots:  # pragma: no cover
+        raise Exception(
+            "Cannot update snapshot %s - rotated name %s already exists"
+            % (snapshot_name, rotated_name)
+        )
+
+    cmd = command.Command(["aptly", "snapshot", "rename", snapshot_name, rotated_name])
+
+    cmd.provide("virtual", rotated_name)
+    return cmd
+
+
+def cmd_snapshot_update(
+    cfg: dict, snapshot_name: str, snapshot_config: dict
+) -> list[command.Command]:
+    """Create commands to update all rotating snapshots.
+
+    :param cfg: pyaptly config
+    :type cfg: dict
+    :param snapshot_name: Name of the snapshot to update/rotate
+    :type snapshot_name: str
+    :param snapshot_config: Configuration of the snapshot from the yml file.
+    :type snapshot_config: dict"""
+
+    # To update a snapshot, we need to do roughly the following steps:
+    # 1) Rename the current snapshot and all snapshots that depend on it
+    # 2) Create new version of the snapshot and all snapshots that depend on it
+    # 3) Recreate all renamed snapshots
+    # 4) Update / switch-over publishes
+    # 5) Remove the rotated temporary snapshots
+
+    if "%T" in snapshot_name:  # pragma: no cover
+        # Timestamped snapshots are never rotated by design.
+        return []
+
+    affected_snapshots = [snapshot_name]
+    affected_snapshots.extend(list(dependents_of_snapshot(snapshot_name)))
+
+    # TODO: rotated snapshots should be identified by configuration option,
+    # not just by "not being timestamped".
+
+    rename_cmds = [rotate_snapshot(cfg, snap) for snap in affected_snapshots]
+
+    # The "intermediate" command causes the state reader to refresh. At the
+    # same time, it provides a collection point for dependency handling.
+    intermediate = command.FunctionCommand(state_reader.state.read)
+    intermediate.provide("virtual", "all-snapshots-rotated")
+
+    for cmd in rename_cmds:
+        # Ensure that our "intermediate" pseudo command comes after all
+        # the rename commands, by ensuring it depends on all their "virtual"
+        # provided items.
+        cmd_vprovides = [
+            provide for ptype, provide in cmd.get_provides() if ptype == "virtual"
+        ]
+        for provide in cmd_vprovides:
+            intermediate.require("virtual", provide)
+
+    # Same as before - create a focal point to "collect" dependencies
+    # after the snapshots have been rebuilt. Also reload state once again.
+    intermediate2 = command.FunctionCommand(state_reader.state.read)
+    intermediate2.provide("virtual", "all-snapshots-rebuilt")
+
+    create_cmds = []
+    for _ in affected_snapshots:
+        # Well.. there's normally just one, but since we need interface
+        # consistency, cmd_snapshot_create() returns a list. And since it
+        # returns a list, we may just as well future-proof it and loop instead
+        # of assuming it's going to be a single entry (and fail horribly if
+        # this assumption changes in the future).
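+        #
+        # (Sketch, for orientation: the virtual provides/requires built here
+        # order the phases as
+        #
+        #   rename snapshot + dependents    -> provide rotated names
+        #   intermediate (re-read state)    -> requires all rotated names,
+        #                                      provides "all-snapshots-rotated"
+        #   re-create snapshot + dependents -> require "all-snapshots-rotated",
+        #                                      provide "rebuilt-<name>"
+        #   intermediate2 (re-read state)   -> requires the "rebuilt-*" items,
+        #                                      provides "all-snapshots-rebuilt"
+        #   republish commands              -> require "all-snapshots-rebuilt".)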
+        for create_cmd in cmd_snapshot_create(
+            cfg, snapshot_name, cfg["snapshot"][snapshot_name], ignore_existing=True
+        ):
+            # enforce cmd to run after the refresh, and thus also
+            # after all the renames
+            create_cmd.require("virtual", "all-snapshots-rotated")
+
+            # Evil hack - we must do the dependencies ourselves, to avoid
+            # getting a circular graph
+            create_cmd._requires = set(
+                [
+                    (type_, req)
+                    for type_, req in create_cmd._requires
+                    if type_ != "snapshot"
+                ]
+            )
+
+            create_cmd.provide("virtual", "readiness-for-%s" % snapshot_name)
+            for follower in dependents_of_snapshot(snapshot_name):
+                create_cmd.require("virtual", "readiness-for-%s" % follower)
+
+            # "Focal point" - make intermediate2 run after all the commands
+            # that re-create the snapshots
+            create_cmd.provide("virtual", "rebuilt-%s" % snapshot_name)
+            intermediate2.require("virtual", "rebuilt-%s" % snapshot_name)
+
+            create_cmds.append(create_cmd)
+
+    # At this point, snapshots have been renamed, then recreated.
+    # After each of the steps, the system state has been re-read.
+    # So now, we're left with updating the publishes.
+
+    def is_publish_affected(name, publish_info):
+        if (
+            "%s %s" % (name, publish_info["distribution"])
+            in state_reader.state.publishes
+        ):
+            try:
+                for snap in publish_info["snapshots"]:
+                    snap_name = snapshot_spec_to_name(cfg, snap)
+                    if snap_name in affected_snapshots:
+                        return True
+            except KeyError:  # pragma: no cover
+                lg.debug(
+                    "publish_info endpoint %s is not affected because it has no "
+                    "snapshots defined",
+                    name,
+                )
+            return False
+        return False
+
+    if "publish" in cfg:
+        all_publish_commands = [
+            publish.publish_cmd_update(
+                cfg, publish_name, publish_conf_entry, ignore_existing=True
+            )
+            for publish_name, publish_conf in cfg["publish"].items()
+            for publish_conf_entry in publish_conf
+            if publish_conf_entry.get("automatic-update", "false") is True
+            if is_publish_affected(publish_name, publish_conf_entry)
+        ]
+    else:
+        all_publish_commands = []
+
+    republish_cmds = [c for c in all_publish_commands if c]
+
+    # Ensure that the republish commands run AFTER the snapshots are rebuilt
+    for cmd in republish_cmds:
+        cmd.require("virtual", "all-snapshots-rebuilt")
+
+    # TODO:
+    # - We need to clean up all the rotated snapshots after the publishes are
+    #   rebuilt
+    # - Filter publishes, so only the non-timestamped publishes are rebuilt
+
+    return rename_cmds + create_cmds + republish_cmds + [intermediate, intermediate2]
+
+
+def cmd_snapshot_create(
+    cfg: dict,
+    snapshot_name: str,
+    snapshot_config: dict,
+    ignore_existing: Optional[bool] = False,
+) -> list[command.Command]:
+    """Create a snapshot create command to be ordered and executed later.
+
+    :param cfg: pyaptly config
+    :type cfg: dict
+    :param snapshot_name: Name of the snapshot to create
+    :type snapshot_name: str
+    :param snapshot_config: Configuration of the snapshot from the yml file.
+    :type snapshot_config: dict
+    :param ignore_existing: Optional, defaults to False.
+        If set to True, still return command objects even if the requested
+        snapshot already exists.
+    :type ignore_existing: bool
+
+    :rtype: list[command.Command]
+    """
+
+    # TODO: extract possible timestamp component
+    # and generate *actual* snapshot name
+
+    snapshot_name = date_tools.expand_timestamped_name(snapshot_name, snapshot_config)
+
+    if snapshot_name in state_reader.state.snapshots and not ignore_existing:
+        return []
+
+    default_aptly_cmd = ["aptly", "snapshot", "create"]
+    default_aptly_cmd.append(snapshot_name)
+    default_aptly_cmd.append("from")
+
+    if "mirror" in snapshot_config:
+        cmd = command.Command(default_aptly_cmd + ["mirror", snapshot_config["mirror"]])
+        cmd.provide("snapshot", snapshot_name)
+        cmd.require("mirror", snapshot_config["mirror"])
+        return [cmd]
+
+    elif "repo" in snapshot_config:
+        cmd = command.Command(default_aptly_cmd + ["repo", snapshot_config["repo"]])
+        cmd.provide("snapshot", snapshot_name)
+        cmd.require("repo", snapshot_config["repo"])
+        return [cmd]
+
+    elif "filter" in snapshot_config:
+        cmd = command.Command(
+            [
+                "aptly",
+                "snapshot",
+                "filter",
+                snapshot_spec_to_name(cfg, snapshot_config["filter"]["source"]),
+                snapshot_name,
+                snapshot_config["filter"]["query"],
+            ]
+        )
+        cmd.provide("snapshot", snapshot_name)
+        cmd.require(
+            "snapshot", snapshot_spec_to_name(cfg, snapshot_config["filter"]["source"])
+        )
+        return [cmd]
+
+    elif "merge" in snapshot_config:
+        cmd = command.Command(
+            [
+                "aptly",
+                "snapshot",
+                "merge",
+                snapshot_name,
+            ]
+        )
+        cmd.provide("snapshot", snapshot_name)
+
+        for source in snapshot_config["merge"]:
+            source_name = snapshot_spec_to_name(cfg, source)
+            cmd.append(source_name)
+            cmd.require("snapshot", source_name)
+
+        return [cmd]
+
+    else:  # pragma: no cover
+        raise ValueError(
+            "Don't know how to handle snapshot config: %s" % (snapshot_config)
+        )
+
+
+def clone_snapshot(origin, destination):
+    """Creates a clone snapshot command with dependencies to be ordered and
+    executed later.
+
+    :param origin: The snapshot to clone
+    :type origin: str
+    :param destination: The new name of the snapshot
+    :type destination: str"""
+    cmd = command.Command(["aptly", "snapshot", "merge", destination, origin])
+    cmd.provide("snapshot", destination)
+    cmd.require("snapshot", origin)
+    return cmd
diff --git a/pyaptly/state_reader.py b/pyaptly/state_reader.py
new file mode 100644
index 0000000..1f3be7e
--- /dev/null
+++ b/pyaptly/state_reader.py
@@ -0,0 +1,170 @@
+import logging
+import re
+
+from . import main
+
+lg = logging.getLogger(__name__)
+
+
+class SystemStateReader(object):
+    """Reads the state from aptly and gpg to find out what operations have to
+    be performed to reach the state defined in the yml config-file.
+    """
+
+    known_dependency_types = ("repo", "snapshot", "mirror", "gpg_key")
+
+    def __init__(self):
+        self.gpg_keys = set()
+        self.mirrors = set()
+        self.repos = set()
+        self.snapshots = set()
+        self.snapshot_map = {}
+        self.publishes = set()
+        self.publish_map = {}
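+
+    # Typical use (illustrative): read the full system state once via the
+    # module-level `state`, then query the cached sets, e.g.
+    #
+    #   state_reader.state.read()
+    #   "fakerepo01" in state_reader.state.mirrors
+    #   state_reader.state.has_dependency(("snapshot", "fakerepo01-20121010T0000Z"))
+
+    def _extract_sources(self, data):
+        """
+        Extract sources from data.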
+
+        Data needs to be in the following format:
+
+            Name: test-snap
+            Description: some description
+            Sources:
+              test-snap-base [snapshot]
+        """
+        entered_sources = False
+        sources = []
+        for line in data.split("\n"):
+            # Source lines need to start with two spaces
+            if entered_sources and line[0:2] != "  ":
+                break
+
+            if entered_sources:
+                sources.append(line)
+
+            if line == "Sources:":
+                entered_sources = True
+
+        return sources
+
+    def read(self):
+        """Reads all available system states."""
+        self.read_gpg()
+        self.read_repos()
+        self.read_mirror()
+        self.read_snapshot()
+        self.read_snapshot_map()
+        self.read_publishes()
+        self.read_publish_map()
+
+    def read_gpg(self):
+        """Read all trusted keys in gpg."""
+        self.gpg_keys = set()
+        cmd = [
+            "gpg",
+            "--no-default-keyring",
+            "--keyring",
+            "trustedkeys.gpg",
+            "--list-keys",
+            "--with-colons",
+        ]
+        data, _ = main.call_output(cmd)
+        lg.debug("GPG returned: %s", data)
+        for line in data.split("\n"):
+            field = line.split(":")
+            if field[0] in ("pub", "sub"):
+                key = field[4]
+                key_short = key[8:]
+                self.gpg_keys.add(key)
+                self.gpg_keys.add(key_short)
+
+    def read_publish_map(self):
+        """Create a publish map: publish -> snapshots."""
+        self.publish_map = {}
+        # match example: main: test-snapshot [snapshot]
+        re_snap = re.compile(r"\s+[\w\d-]+\:\s([\w\d-]+)\s\[snapshot\]")
+        for publish in self.publishes:
+            prefix, dist = publish.split(" ")
+            data, _ = main.call_output(["aptly", "publish", "show", dist, prefix])
+
+            sources = self._extract_sources(data)
+            matches = [re_snap.match(source) for source in sources]
+            snapshots = [match.group(1) for match in matches if match]
+            self.publish_map[publish] = set(snapshots)
+
+        lg.debug("Joined snapshots and publishes: %s", self.publish_map)
+
+    def read_snapshot_map(self):
+        """Create a snapshot map: snapshot -> snapshots. This is also called
+        merge-tree."""
+        self.snapshot_map = {}
+        # match example: test-snapshot [snapshot]
+        re_snap = re.compile(r"\s+([\w\d-]+)\s\[snapshot\]")
+        for snapshot_outer in self.snapshots:
+            data, _ = main.call_output(["aptly", "snapshot", "show", snapshot_outer])
+            sources = self._extract_sources(data)
+            matches = [re_snap.match(source) for source in sources]
+            snapshots = [match.group(1) for match in matches if match]
+            self.snapshot_map[snapshot_outer] = set(snapshots)
+
+        lg.debug("Joined snapshots with self(snapshots): %s", self.snapshot_map)
+
+    def read_publishes(self):
+        """Read all available publishes."""
+        self.publishes = set()
+        self.read_aptly_list("publish", self.publishes)
+
+    def read_repos(self):
+        """Read all available repos."""
+        self.repos = set()
+        self.read_aptly_list("repo", self.repos)
+
+    def read_mirror(self):
+        """Read all available mirrors."""
+        self.mirrors = set()
+        self.read_aptly_list("mirror", self.mirrors)
+
+    def read_snapshot(self):
+        """Read all available snapshots."""
+        self.snapshots = set()
+        self.read_aptly_list("snapshot", self.snapshots)
+
+    def read_aptly_list(self, type_, list_):
+        """Generic method to read lists from aptly.
+
+        :param type_: The type of list to read, e.g. snapshot
+        :type type_: str
+        :param list_: The set to read results into
+        :type list_: set"""
+        data, _ = main.call_output(["aptly", type_, "list", "-raw"])
+        lg.debug("Aptly returned %s: %s", type_, data)
+        for line in data.split("\n"):
+            clean_line = line.strip()
+            if clean_line:
+                list_.add(clean_line)
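+
+    # For example (illustrative): `aptly snapshot list -raw` prints bare
+    # names, one per line, e.g.
+    #
+    #   fakerepo01-20121010T0000Z
+    #   fakerepo02-20121006T0000Z
+    #
+    # read_aptly_list("snapshot", self.snapshots) strips each line and adds
+    # every non-empty one to the set.
+
+    def has_dependency(self, dependency):
+        """Check system state dependencies.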
+ + :param dependency: The dependency to check + :type dependency: list""" + type_, name = dependency + + if type_ == "repo": # pragma: no cover + return name in self.repos + if type_ == "mirror": # pragma: no cover + return name in self.mirrors + elif type_ == "snapshot": + return name in self.snapshots # pragma: no cover + elif type_ == "gpg_key": # pragma: no cover + return name in self.gpg_keys # Not needed ATM + elif type_ == "virtual": + # virtual dependencies can never be resolved by the + # system state reader - they are used for internal + # ordering only + return False + else: + raise ValueError("Unknown dependency to resolve: %s" % str(dependency)) + + +state = SystemStateReader() diff --git a/pyaptly/test.py b/pyaptly/test.py deleted file mode 100644 index 4e62c42..0000000 --- a/pyaptly/test.py +++ /dev/null @@ -1,157 +0,0 @@ -# type: ignore # TODO -# flake8: noqa # TODO - -"""Tools for testing pyaptly""" - -import codecs -import contextlib -import json -import os -import shutil -import subprocess -import sys -import tempfile -from pathlib import Path - -import freezegun -import pytest -import six -import yaml - -import pyaptly.legacy as pyaptly - -aptly_conf = Path.home().absolute() / ".aptly.conf" - -hypothesis_min_ver = pytest.mark.skipif( - sys.version_info < (2, 7), reason="requires python2.7" -) - -if six.PY2: # pragma: no cover - environb = os.environ -else: - environb = os.environb # pragma: no cover - - -def read_yml(file_): - """Read and merge a yml file. - - :param file_: file to read - :type file_: str""" - directory = os.path.dirname(file_) - with codecs.open(file_, encoding="UTF-8") as f: - main_yml = dict(yaml.safe_load(f.read())) - merges = [] - if "merge" in main_yml: - for merge_path in main_yml["merge"]: - path = os.path.join( - directory, - merge_path.encode("UTF-8"), - ) - merges.append(read_yml(path)) - del main_yml["merge"] - for merge_struct in merges: - main_yml = merge(main_yml, merge_struct) - return main_yml - - -def merge(a, b): - """Merge two dicts. - - :param a: dict a - :type a: dict - :param b: dict b - :type b: dict - :rtype: dict - """ - if isinstance(a, dict) and isinstance(b, dict): - d = dict(a) - d.update(dict(((k, merge(a.get(k, None), b[k])) for k in b))) - for k, v in list(d.items()): - if v == "None": - del d[k] - return d - return b - - -def execute_and_parse_show_cmd(args): # pragma: no cover - """Executes and parses a aptly show command. - - :param args: Command to execute - :type args: list - """ - result = {} - show, _ = pyaptly.call_output(args) - for line in show.split("\n"): - if ":" in line: - key, value = line.split(":", 1) - key = key.lower() - result[key] = value.strip() - return result - - -def create_config(test_input): - """Returns path to pyaptly config from test input. - - Test input should be minimal and extended/tranformed in create_config. - - :param test_input: Test input read from test-yml. 
- :type test_input: dict - :rtype: (dict, str) - """ - input_ = read_yml(test_input) - if "mirror" in input_: - for mirror in input_["mirror"].values(): - if "components" not in mirror: - mirror["components"] = "main" - if "distribution" not in mirror: - mirror["distribution"] = "main" - if "publish" in input_: # pragma: no cover - for publish in input_["publish"].values(): - for item in publish: - if "components" not in item: - item["components"] = "main" - if "distribution" not in item: - item["distribution"] = "main" - try: - file_ = codecs.getwriter("UTF-8")(tempfile.NamedTemporaryFile(delete=False)) - yaml.dump(input_, file_) - finally: - file_.close() - return (input_, file_.name) - - -@contextlib.contextmanager -def clean_and_config(test_input, freeze="2012-10-10 10:10:10", sign=False): - """Remove aptly file and create a input config file to run pyaptly with. - - Test input should be minimal and extended/tranformed in create_config. - """ - tempdir_obj = tempfile.TemporaryDirectory() - tempdir = Path(tempdir_obj.name).absolute() - - aptly = tempdir / "aptly" - aptly.mkdir(parents=True) - config = {"rootDir": str(aptly)} - if aptly_conf.exists(): # pragma: no cover - aptly_conf.unlink() - with aptly_conf.open("w") as f: - json.dump(config, f) - - gnupg = tempdir / "gnugp" - gnupg.mkdir(parents=True) - os.chown(gnupg, 0, 0) - gnupg.chmod(0o700) - environb[b"GNUPGHOME"] = str(gnupg).encode("UTF-8") - - if sign: # pragma: no cover - setup = Path("/setup") - subprocess.run(["gpg", "--import", setup / "test03.pub"], check=True) - subprocess.run(["gpg", "--import", setup / "test03.key"], check=True) - - input_, file_ = create_config(test_input) - try: - with freezegun.freeze_time(freeze): - yield (input_, file_) - finally: - tempdir_obj.cleanup() - aptly_conf.unlink() diff --git a/pyaptly/test_test.py b/pyaptly/test_test.py deleted file mode 100644 index fd4a9b4..0000000 --- a/pyaptly/test_test.py +++ /dev/null @@ -1,96 +0,0 @@ -# type: ignore # TODO -# flake8: noqa # TODO - -"""Testing the testing tools""" - -import os -import random -import sys -import unittest - -from . 
import test - -if not sys.version_info < (2, 7): # pragma: no cover - import hypothesis.strategies as st - from hypothesis import example, given # noqa - - -if sys.version_info < (2, 7): # pragma: no cover - import mock - - given = mock.MagicMock() # noqa - example = mock.MagicMock() # noqa - st = mock.MagicMock() # noqa - -_test_base = os.path.dirname(os.path.abspath(__file__)).encode("UTF-8") - -yml_st = st.recursive( - st.floats(-1, 1) | st.booleans() | st.text() | st.none() | st.binary(), - lambda children: st.lists(children, max_size=10) - | st.dictionaries(st.text(), children, max_size=10), - max_leaves=30, -) - - -class TestTest(unittest.TestCase): - def test_read_yml(self): - """Test if reading yml files works without errors.""" - path = os.path.join(_test_base, b"merge.yml") - yml = test.read_yml(path) - assert yml["mirror"]["fakerepo01"] is not None - - def test_delete(self): - """Test if merges can delete fields""" - path = os.path.join(_test_base, b"delete_merge.yml") - yml = test.read_yml(path) - assert "fakerepo01" not in yml["mirror"] - - @test.hypothesis_min_ver - @given(yml_st, yml_st, st.random_module()) - @example({"1": "Huhu"}, {"1": "None"}, st.random_module()) - def test_merge(self, a, b, rand): # pragma: no cover - """Test if merge has the expected result.""" - res = test.merge(a, b) - for _ in range(10): - path, data_b = self.rand_path(b) - if data_b == "None": - error = False - try: - data_res = self.get_path(path, res) - except KeyError: - error = True - assert error - else: - data_res = self.get_path(path, res) - assert data_res == data_b - if isinstance(a, dict) and isinstance(b, dict): - path, data_a = self.rand_path(a) - try: - data_res = self.get_path(path, res) - if data_a != data_res: # pragma: no cover - data_b = self.get_path(path, b) - assert data_res == data_b - except (TypeError, KeyError): - pass - - def get_path(self, path, data): # pragma: no cover - for i in path: - data = data[i] - if isinstance(data, dict): - return None - return data - - def rand_path(self, data): # pragma: no cover - path = [] - while True: - if isinstance(data, dict): - keys = list(data.keys()) - if keys: - k = random.choice(list(data.keys())) - path.append(k) - data = data[k] - else: - return path, None - else: - break - return path, data diff --git a/pyaptly/tests/publish-previous.toml b/pyaptly/tests/publish-previous.toml new file mode 100644 index 0000000..54326bc --- /dev/null +++ b/pyaptly/tests/publish-previous.toml @@ -0,0 +1,34 @@ +[mirror.fakerepo01] +max-tries = 2 +archive = "http://localhost:3123/fakerepo01" +gpg-keys = [ "2841988729C7F3FF",] +components = "main" +distribution = "main" + +[mirror.fakerepo02] +archive = "http://localhost:3123/fakerepo02" +gpg-keys = [ "2841988729C7F3FF",] +components = "main" +distribution = "main" + +[snapshot."fakerepo01-%T"] +mirror = "fakerepo01" + +[snapshot."fakerepo02-%T"] +mirror = "fakerepo02" + +[snapshot."fakerepo01-%T".timestamp] +time = "00:00" + +[snapshot."fakerepo02-%T".timestamp] +time = "00:00" +repeat-weekly = "sat" + +[snapshot."superfake-%T"] +merge = [ + { name = "fakerepo01-%T", timestamp = "previous" }, + { name = "fakerepo02-%T", timestamp = 0 }, +] + +[snapshot."superfake-%T".timestamp] +time = "00:00" diff --git a/pyaptly/dateround_test.py b/pyaptly/tests/test_dateround.py similarity index 67% rename from pyaptly/dateround_test.py rename to pyaptly/tests/test_dateround.py index 755ec32..a50e797 100644 --- a/pyaptly/dateround_test.py +++ b/pyaptly/tests/test_dateround.py @@ -1,52 +1,28 @@ -# type: ignore # 
TODO -# flake8: noqa # TODO - """Dateround tests""" import datetime import os.path -import sys - -from . import test -from .legacy import ( - date_round_daily, - date_round_weekly, - iso_to_gregorian, - snapshot_spec_to_name, - time_delta_helper, - time_remove_tz, -) - -_test_base = os.path.dirname(os.path.abspath(__file__)).encode("UTF-8") - -if not sys.version_info < (2, 7): # pragma: no cover - from hypothesis import given # noqa - from hypothesis.strategies import integers # noqa - from hypothesis.strategies import datetimes, times # noqa +import pytest +import yaml +from hypothesis import given +from hypothesis.strategies import datetimes, integers, times +from .. import date_tools, snapshot -if sys.version_info < (2, 7): # pragma: no cover - import mock - - given = mock.MagicMock() # noqa - datetimes = mock.MagicMock() # noqa - times = mock.MagicMock() # noqa - integers = mock.MagicMock() # noqa +_test_base = os.path.dirname(os.path.abspath(__file__)).encode("UTF-8") -@test.hypothesis_min_ver @given(datetimes()) def test_is_to_gregorian(date): # pragma: no cover """Test if a roundtrip of isoclander() -> iso_to_gregorian() is correct""" iso_tuple = date.isocalendar() - new_date = iso_to_gregorian(*iso_tuple) + new_date = date_tools.iso_to_gregorian(*iso_tuple) assert date.year == new_date.year assert date.month == new_date.month assert date.day == new_date.day -@test.hypothesis_min_ver @given( datetimes(min_value=datetime.datetime(year=2, month=1, day=1)), integers(min_value=1, max_value=7), @@ -54,8 +30,8 @@ def test_is_to_gregorian(date): # pragma: no cover ) def test_round_weekly(date, day_of_week, time): # pragma: no cover """Test if the round function rounds the expected delta""" - time = time_remove_tz(time) - round_date = date_round_weekly(date, day_of_week, time) + time = date_tools.time_remove_tz(time) + round_date = date_tools.date_round_weekly(date, day_of_week, time) date_time = datetime.time( hour=date.hour, minute=date.minute, @@ -63,7 +39,7 @@ def test_round_weekly(date, day_of_week, time): # pragma: no cover microsecond=date.microsecond, ) # double round - assert round_date == date_round_weekly(round_date, day_of_week, time) + assert round_date == date_tools.date_round_weekly(round_date, day_of_week, time) if round_date == date: # pragma: no cover # Find tz problems assert date_time == time @@ -81,10 +57,10 @@ def test_round_weekly(date, day_of_week, time): # pragma: no cover assert round_date.isoweekday() == day_of_week # Expected delta date_delta = date - round_date - date_day_time_delta = time_delta_helper(date_time) + datetime.timedelta( - days=date.weekday() - ) - given_day_time_delta = time_delta_helper(time) + datetime.timedelta( + date_day_time_delta = date_tools.time_delta_helper( + date_time + ) + datetime.timedelta(days=date.weekday()) + given_day_time_delta = date_tools.time_delta_helper(time) + datetime.timedelta( days=day_of_week - 1 ) delta = date_day_time_delta - given_day_time_delta @@ -107,7 +83,7 @@ def test_weekly_examples(): ) time = datetime.time(hour=23, minute=0) day_of_week = 2 - rounded = date_round_weekly(date, day_of_week, time) + rounded = date_tools.date_round_weekly(date, day_of_week, time) assert datetime.datetime(2015, 10, 27, 23, 0) == rounded date = datetime.datetime( year=2015, @@ -116,16 +92,15 @@ def test_weekly_examples(): hour=23, minute=1, ) - rounded = date_round_weekly(date, day_of_week, time) + rounded = date_tools.date_round_weekly(date, day_of_week, time) assert datetime.datetime(2015, 11, 3, 23, 0) == rounded 
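+
+
+# Note (illustrative): date_round_weekly() rounds *down* to the given ISO
+# weekday and time, so a one-minute difference in the input can move the
+# result by a whole week (2015-10-27 23:00 vs 2015-11-03 23:00 above).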
-@test.hypothesis_min_ver @given(datetimes(), times()) def test_round_daily(date, time): # pragma: no cover """Test if the round function rounds the expected delta""" - time = time_remove_tz(time) - round_date = date_round_daily(date, time) + time = date_tools.time_remove_tz(time) + round_date = date_tools.date_round_daily(date, time) date_time = datetime.time( hour=date.hour, minute=date.minute, @@ -133,7 +108,7 @@ def test_round_daily(date, time): # pragma: no cover microsecond=date.microsecond, ) # double round - assert round_date == date_round_daily(round_date, time) + assert round_date == date_tools.date_round_daily(round_date, time) if round_date == date: # pragma: no cover # Find tz problems assert date_time == time @@ -149,7 +124,9 @@ def test_round_daily(date, time): # pragma: no cover assert round_date.microsecond == time.microsecond # Expected delta date_delta = date - round_date - time_delta = time_delta_helper(date_time) - time_delta_helper(time) + time_delta = date_tools.time_delta_helper( + date_time + ) - date_tools.time_delta_helper(time) if date_time > time: assert date_delta == time_delta else: @@ -168,10 +145,10 @@ def test_daily_examples(): minute=34, ) time = datetime.time(hour=23, minute=00) - rounded = date_round_daily(date, time) + rounded = date_tools.date_round_daily(date, time) assert datetime.datetime(2015, 9, 30, 23, 0) == rounded time = datetime.time(hour=11, minute=00) - rounded = date_round_daily(date, time) + rounded = date_tools.date_round_daily(date, time) assert datetime.datetime(2015, 10, 1, 11, 0) == rounded date = datetime.datetime( year=2015, @@ -180,7 +157,7 @@ def test_daily_examples(): hour=10, minute=59, ) - rounded = date_round_daily(date, time) + rounded = date_tools.date_round_daily(date, time) assert datetime.datetime(2015, 9, 30, 11, 0) == rounded date = datetime.datetime( year=2015, @@ -189,21 +166,18 @@ def test_daily_examples(): hour=11, minute=1, ) - rounded = date_round_daily(date, time) + rounded = date_tools.date_round_daily(date, time) assert datetime.datetime(2015, 10, 1, 11, 0) == rounded -def test_snapshot_spec_to_name(): - with test.clean_and_config( - os.path.join( - _test_base, - b"publish-previous.yml", - ) - ) as (tyml, config): - snaps = tyml["snapshot"]["superfake-%T"]["merge"] +@pytest.mark.parametrize("config", ["publish-previous.toml"], indirect=True) +def test_snapshot_spec_to_name(config, test_path, freeze): + with (test_path / config).open("r") as f: + tyml = yaml.load(f, Loader=yaml.FullLoader) + snaps = tyml["snapshot"]["superfake-%T"]["merge"] - rounded1 = snapshot_spec_to_name(tyml, snaps[0]) - rounded2 = snapshot_spec_to_name(tyml, snaps[1]) + rounded1 = snapshot.snapshot_spec_to_name(tyml, snaps[0]) + rounded2 = snapshot.snapshot_spec_to_name(tyml, snaps[1]) - assert rounded1 == "fakerepo01-20121009T0000Z" - assert rounded2 == "fakerepo02-20121006T0000Z" + assert rounded1 == "fakerepo01-20121009T0000Z" + assert rounded2 == "fakerepo02-20121006T0000Z" diff --git a/pyaptly/tests/test_graph.py b/pyaptly/tests/test_graph.py index 0589c15..46af2ac 100644 --- a/pyaptly/tests/test_graph.py +++ b/pyaptly/tests/test_graph.py @@ -1,11 +1,12 @@ """Testing dependency graphs.""" import random from functools import partial +from typing import Union from hypothesis import given, settings from hypothesis import strategies as st -from pyaptly.legacy import Command, FunctionCommand # type: ignore +from .. 
import command # Disable the deadline globally for all tests settings.register_profile("my_profile", deadline=None) @@ -103,6 +104,7 @@ def run_graph(tree): commands = [] index = list(range(len(tree[0]))) random.shuffle(index) + cmd: Union[command.Command, command.FunctionCommand] for i in index: def dummy(i): # pragma: no cover @@ -111,17 +113,17 @@ def dummy(i): # pragma: no cover if tree[2][i]: func = partial(dummy, i) func.__name__ = dummy.__name__ # type: ignore - cmd = FunctionCommand(func) + cmd = command.FunctionCommand(func) else: - cmd = Command(i) + cmd = command.Command([str(i)]) for provides in tree[0][i]: cmd.provide("virtual", provides) for requires in tree[1][i]: cmd.require("virtual", requires) commands.append(cmd) - ordered = Command.order_commands(commands) + ordered = command.Command.order_commands(commands) assert len(commands) == len(ordered) provided: set[tuple[str, str]] = set() - for command in ordered: - assert command._requires.issubset(provided) - provided.update(command._provides) + for cmd in ordered: + assert cmd._requires.issubset(provided) + provided.update(cmd._provides) diff --git a/pyaptly/helpers_test.py b/pyaptly/tests/test_helpers.py similarity index 81% rename from pyaptly/helpers_test.py rename to pyaptly/tests/test_helpers.py index bb77f22..04c788f 100644 --- a/pyaptly/helpers_test.py +++ b/pyaptly/tests/test_helpers.py @@ -1,14 +1,12 @@ -# type: ignore # TODO -# flake8: noqa # TODO - """Testing testing helper functions""" import subprocess -from pyaptly.legacy import Command, SystemStateReader, call_output +from .. import command, main, state_reader def test_call_output_error(): """Test if call_output raises errors correctly""" + # TDOD remove args = [ "bash", "-c", @@ -16,7 +14,7 @@ def test_call_output_error(): ] error = False try: - call_output(args) + main.call_output(args) except subprocess.CalledProcessError as e: assert e.returncode == 42 error = True @@ -25,7 +23,7 @@ def test_call_output_error(): def test_command_dependency_fail(): """Test if bad dependencies fail correctly.""" - a = Command(["ls"]) + a = command.Command(["ls"]) error = False try: a.require("turbo", "banana") @@ -36,7 +34,7 @@ def test_command_dependency_fail(): def test_dependency_callback_file(): """Test if bad dependencies fail correctly.""" - state = SystemStateReader() + state = state_reader.SystemStateReader() try: state.has_dependency(["turbo", "banana"]) except ValueError as e: diff --git a/pyaptly/tests/test_mirror.py b/pyaptly/tests/test_mirror.py index a90ddf9..b0bf5f6 100644 --- a/pyaptly/tests/test_mirror.py +++ b/pyaptly/tests/test_mirror.py @@ -3,7 +3,7 @@ import pytest -import pyaptly +from .. 
import main, state_reader @pytest.mark.parametrize("config", ["debug.toml"], indirect=True) @@ -16,14 +16,14 @@ def test_debug(environment, config): "mirror", "create", ] - pyaptly.main(args) + main.main(args) assert logging.getLogger().level == logging.DEBUG @pytest.mark.parametrize("config", ["mirror-extra.toml"], indirect=True) def test_mirror_create(environment, config, caplog): """Test if creating mirrors works.""" - pyaptly.main(["-c", config, "mirror", "create"]) + main.main(["-c", config, "mirror", "create"]) keys_added = [] for rec in caplog.records: for arg in rec.args: @@ -33,7 +33,7 @@ def test_mirror_create(environment, config, caplog): assert len(keys_added) > 0 assert len(keys_added) == len(set(keys_added)), "Key multiple times added" - state = pyaptly.SystemStateReader() + state = state_reader.SystemStateReader() state.read() assert state.mirrors == {"fakerepo03"} @@ -50,7 +50,7 @@ def test_mirror_update_inexistent(config, mirror_update): args = ["-c", config, "mirror", "update", "asdfasdf"] error = False try: - pyaptly.main(args) + main.main(args) except ValueError: error = True assert error @@ -60,4 +60,4 @@ def test_mirror_update_inexistent(config, mirror_update): def test_mirror_update_single(config, mirror_update): """Test if updating a single mirror works.""" args = ["-c", config, "mirror", "update", "fakerepo01"] - pyaptly.main(args) + main.main(args) diff --git a/pyaptly/tests/test_publish.py b/pyaptly/tests/test_publish.py index 1c08228..af15561 100644 --- a/pyaptly/tests/test_publish.py +++ b/pyaptly/tests/test_publish.py @@ -1,7 +1,7 @@ """Test publish functionality.""" import pytest -import pyaptly +from .. import command, main, state_reader @pytest.mark.parametrize("repo", ["fakerepo01", "asdfasdf"]) @@ -11,10 +11,10 @@ def test_publish_create_single(config, snapshot_create, test_key_03, repo): args = ["-c", config, "publish", "create", repo] if repo == "asdfasdf": with pytest.raises(ValueError): - pyaptly.main(args) + main.main(args) return - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() assert set(["fakerepo01 main"]) == state.publishes expect = {"fakerepo01 main": set(["fakerepo01-20121010T0000Z"])} @@ -27,7 +27,7 @@ def test_publish_create_inexistent(config, snapshot_create, test_key_03): args = ["-c", config, "publish", "create", "asdfasdf"] error = False try: - pyaptly.main(args) + main.main(args) except ValueError: error = True assert error @@ -50,12 +50,12 @@ def test_pretend(config, snapshot_create, test_key_03): "create", "fakerepo01", ] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() assert set() == state.publishes assert {} == state.publish_map - assert pyaptly.Command.pretend_mode + assert command.Command.pretend_mode @pytest.mark.parametrize("config", ["publish-repo.toml"], indirect=True) @@ -67,15 +67,15 @@ def test_publish_create_repo(config, repo_create): "publish", "create", ] - pyaptly.main(args) + main.main(args) args = [ "-c", config, "publish", "update", ] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() assert set(["centrify latest"]) == state.publishes assert {"centrify latest": set([])} == state.publish_map @@ -93,8 +93,8 @@ def test_publish_update_rotating(config, freeze, publish_create_rotating, via): """Test if update rotating publishes works.""" freeze.move_to("2012-10-11 10:10:10") 
args = ["-c", config, via, "update"] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() expect = { "fake/current stable": set(["fake-current"]), @@ -134,10 +134,10 @@ def test_publish_update_republish(config, publish_create_republish, freeze): """Test if update republishes works.""" freeze.move_to("2012-10-11 10:10:10") args = ["-c", config, "snapshot", "create"] - pyaptly.main(args) + main.main(args) args = ["-c", config, "publish", "update"] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() assert "fakerepo01-stable main" in state.publishes # As you see fakerepo01-stable main points to the old snapshot @@ -159,10 +159,10 @@ def test_publish_updating_basic(config, publish_create, freeze): """Test if updating publishes works.""" freeze.move_to("2012-10-11 10:10:10") args = ["-c", config, "snapshot", "create"] - pyaptly.main(args) + main.main(args) args = ["-c", config, "publish", "update"] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() expect = set( [ @@ -187,9 +187,9 @@ def test_repo_create_single(config, repo, test_key_03): args = ["-c", config, "repo", "create", repo] if repo == "asdfasdf": with pytest.raises(ValueError): - pyaptly.main(args) + main.main(args) return - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() assert set(["centrify"]) == state.repos diff --git a/pyaptly/tests/test_snapshot.py b/pyaptly/tests/test_snapshot.py index bf04c83..62f5bf5 100644 --- a/pyaptly/tests/test_snapshot.py +++ b/pyaptly/tests/test_snapshot.py @@ -1,8 +1,7 @@ """Test snapshot functionality.""" import pytest -import pyaptly -from pyaptly import util +from .. 
import main, state_reader, util @pytest.mark.parametrize("config", ["snapshot.toml"], indirect=True) @@ -24,7 +23,7 @@ def test_snapshot_create(mirror_update, config, exists): args = ["-c", config, "snapshot", "create", mirror] error = False try: - pyaptly.main(args) + main.main(args) except ValueError: error = True assert error != exists @@ -34,8 +33,8 @@ def test_snapshot_create(mirror_update, config, exists): def test_snapshot_create_rotating(mirror_update, config): """Test if rotating snapshot create works.""" args = ["-c", config, "snapshot", "create"] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() assert set( [ @@ -62,8 +61,8 @@ def test_snapshot_update_threetimes_rotating(snapshot_update_rotating, config, f "snapshot", "update", ] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() assert set( [ @@ -104,8 +103,8 @@ def test_snapshot_update_threetimes_rotating(snapshot_update_rotating, config, f "snapshot", "update", ] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() assert set( [ @@ -152,8 +151,8 @@ def test_snapshot_update_threetimes_rotating(snapshot_update_rotating, config, f def test_snapshot_create_repo(config, repo_create): """Test if repo snapshot create works.""" args = ["-c", config, "snapshot", "create"] - pyaptly.main(args) - state = pyaptly.SystemStateReader() + main.main(args) + state = state_reader.SystemStateReader() state.read() assert set(["centrify-latest"]).issubset(state.snapshots) return state diff --git a/pyaptly/tests/test_util.py b/pyaptly/tests/test_util.py index 6a98b3e..f6f8089 100644 --- a/pyaptly/tests/test_util.py +++ b/pyaptly/tests/test_util.py @@ -3,7 +3,7 @@ import pytest -from .. import legacy, util +from .. import snapshot, util EXPECT = """ stdout: 'first @@ -46,15 +46,14 @@ def test_snapshot_spec_as_dict(): snap_string = "snapshot-foo" snap_dict = {"name": "foo"} - # TODO We cannot typecheck this as long as we do not typecheck legacy.py - cfg = { + cfg: dict = { "snapshot": { "foo": {}, } - } # type: ignore + } - assert legacy.snapshot_spec_to_name(cfg, snap_string) == snap_string # type: ignore - assert legacy.snapshot_spec_to_name(cfg, snap_dict) == "foo" # type: ignore + assert snapshot.snapshot_spec_to_name(cfg, snap_string) == snap_string + assert snapshot.snapshot_spec_to_name(cfg, snap_dict) == "foo" def test_get_default_keyserver(): diff --git a/pyaptly/types.py b/pyaptly/types.py new file mode 100644 index 0000000..fed05b0 --- /dev/null +++ b/pyaptly/types.py @@ -0,0 +1,6 @@ +from typing import TYPE_CHECKING, Callable + +if TYPE_CHECKING: # pragma: no cover + from . import command + +SnapshotCommand = Callable[[dict, str, dict], list["command.Command"]] diff --git a/pyaptly/util.py b/pyaptly/util.py index 7f0888b..93345f1 100644 --- a/pyaptly/util.py +++ b/pyaptly/util.py @@ -24,6 +24,18 @@ logger = logging.getLogger(__name__) +def unit_or_list_to_list(thingy): + """Ensures that a yml entry is always a list. Used to allow lists and + single units in the yml file. 
+ + :param thingy: The data to ensure it is a list + :type thingy: list, tuple or other""" + if isinstance(thingy, list) or isinstance(thingy, tuple): + return list(thingy) + else: + return [thingy] + + def get_default_keyserver(): """Get default keyseerver.""" if _PYTEST_KEYSERVER: diff --git a/pyaptly/version.py b/pyaptly/version.py deleted file mode 100644 index 2293fb3..0000000 --- a/pyaptly/version.py +++ /dev/null @@ -1,5 +0,0 @@ -# type: ignore # TODO -# flake8: noqa # TODO - -"""Version module to be read from various places""" -__version__ = "1.2.0" # pragma: no cover diff --git a/pyproject.toml b/pyproject.toml index 491452e..17cc108 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,12 +44,11 @@ show_missing = true [tool.poetry.dependencies] python = "^3.11" -pretty-dump = {git = "https://github.com/adfinis/freeze"} -pytz = "^2023.3.post1" pyyaml = "^6.0.1" click = "^8.1.7" tomli = "^2.0.1" tomli-w = "^1.0.0" +frozendict = "^2.4.0" [tool.poetry.group.dev.dependencies] freezegun = "^1.2.2"
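For reference, util.unit_or_list_to_list() (added above) normalizes the scalar-or-list values that the config file allows; a minimal sketch of its behaviour:

    >>> from pyaptly import util
    >>> util.unit_or_list_to_list("main")
    ['main']
    >>> util.unit_or_list_to_list(["main", "contrib"])
    ['main', 'contrib']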