diff --git a/.ci/make.sh b/.ci/make.sh index c1ab9fba..648f4c72 100755 --- a/.ci/make.sh +++ b/.ci/make.sh @@ -131,7 +131,7 @@ if [[ "$CMD" == "assemble" ]]; then docker run \ --rm -v $repo/.ci/output:/code/opensearch-py/dist \ $product \ - /bin/bash -c "python /code/opensearch-py/utils/build-dists.py $VERSION" + /bin/bash -c "python /code/opensearch-py/utils/build_dists.py $VERSION" # Verify that there are dists in .ci/output if compgen -G ".ci/output/*" > /dev/null; then diff --git a/CHANGELOG.md b/CHANGELOG.md index 12472c2a..2f6c1a62 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ## [Unreleased] ### Added +- Added pylint, enforcing `line-too-long` and `invalid-name` ([#590](https://github.com/opensearch-project/opensearch-py/pull/590)) ### Changed ### Deprecated ### Removed diff --git a/benchmarks/bench_async.py b/benchmarks/bench_async.py index baeb7d80..02c0a238 100644 --- a/benchmarks/bench_async.py +++ b/benchmarks/bench_async.py @@ -16,14 +16,8 @@ from opensearchpy import AsyncHttpConnection, AsyncOpenSearch -host = "localhost" -port = 9200 -auth = ("admin", "admin") -index_name = "test-index-async" -item_count = 100 - -async def index_records(client: Any, item_count: int) -> None: +async def index_records(client: Any, index_name: str, item_count: int) -> None: await asyncio.gather( *[ client.index( @@ -41,6 +35,11 @@ async def index_records(client: Any, item_count: int) -> None: async def test_async(client_count: int = 1, item_count: int = 1) -> None: + host = "localhost" + port = 9200 + auth = ("admin", "admin") + index_name = "test-index-async" + clients = [] for i in range(client_count): clients.append( @@ -61,7 +60,10 @@ async def test_async(client_count: int = 1, item_count: int = 1) -> None: await clients[0].indices.create(index_name) await asyncio.gather( - *[index_records(clients[i], item_count) for i in range(client_count)] + *[ + index_records(clients[i], 
index_name, item_count) + for i in range(client_count) + ] ) await clients[0].indices.refresh(index=index_name) @@ -79,28 +81,31 @@ def test(item_count: int = 1, client_count: int = 1) -> None: loop.close() +ITEM_COUNT = 100 + + def test_1() -> None: - test(1, 32 * item_count) + test(1, 32 * ITEM_COUNT) def test_2() -> None: - test(2, 16 * item_count) + test(2, 16 * ITEM_COUNT) def test_4() -> None: - test(4, 8 * item_count) + test(4, 8 * ITEM_COUNT) def test_8() -> None: - test(8, 4 * item_count) + test(8, 4 * ITEM_COUNT) def test_16() -> None: - test(16, 2 * item_count) + test(16, 2 * ITEM_COUNT) def test_32() -> None: - test(32, item_count) + test(32, ITEM_COUNT) __benchmarks__ = [(test_1, test_8, "1 client vs. more clients (async)")] diff --git a/benchmarks/bench_info_sync.py b/benchmarks/bench_info_sync.py index 0c69a102..bc891067 100644 --- a/benchmarks/bench_info_sync.py +++ b/benchmarks/bench_info_sync.py @@ -20,22 +20,6 @@ from opensearchpy import OpenSearch -host = "localhost" -port = 9200 -auth = ("admin", "admin") -request_count = 250 - - -root = logging.getLogger() -# root.setLevel(logging.DEBUG) -# logging.getLogger("urllib3.connectionpool").setLevel(logging.DEBUG) - -handler = logging.StreamHandler(sys.stdout) -handler.setLevel(logging.DEBUG) -formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") -handler.setFormatter(formatter) -root.addHandler(handler) - def get_info(client: Any, request_count: int) -> float: tt: float = 0 @@ -48,6 +32,22 @@ def get_info(client: Any, request_count: int) -> float: def test(thread_count: int = 1, request_count: int = 1, client_count: int = 1) -> None: + host = "localhost" + port = 9200 + auth = ("admin", "admin") + + root = logging.getLogger() + # root.setLevel(logging.DEBUG) + # logging.getLogger("urllib3.connectionpool").setLevel(logging.DEBUG) + + handler = logging.StreamHandler(sys.stdout) + handler.setLevel(logging.DEBUG) + formatter = logging.Formatter( + "%(asctime)s - %(name)s 
- %(levelname)s - %(message)s" + ) + handler.setFormatter(formatter) + root.addHandler(handler) + clients = [] for i in range(client_count): clients.append( @@ -76,24 +76,27 @@ def test(thread_count: int = 1, request_count: int = 1, client_count: int = 1) - print(f"latency={latency}") +REQUEST_COUNT = 250 + + def test_1() -> None: - test(1, 32 * request_count, 1) + test(1, 32 * REQUEST_COUNT, 1) def test_2() -> None: - test(2, 16 * request_count, 2) + test(2, 16 * REQUEST_COUNT, 2) def test_4() -> None: - test(4, 8 * request_count, 3) + test(4, 8 * REQUEST_COUNT, 3) def test_8() -> None: - test(8, 4 * request_count, 8) + test(8, 4 * REQUEST_COUNT, 8) def test_32() -> None: - test(32, request_count, 32) + test(32, REQUEST_COUNT, 32) __benchmarks__ = [(test_1, test_32, "1 thread vs. 32 threads (sync)")] diff --git a/benchmarks/bench_sync.py b/benchmarks/bench_sync.py index 004fa2e4..7b4695eb 100644 --- a/benchmarks/bench_sync.py +++ b/benchmarks/bench_sync.py @@ -21,24 +21,8 @@ from opensearchpy import OpenSearch, Urllib3HttpConnection -host = "localhost" -port = 9200 -auth = ("admin", "admin") -index_name = "test-index-sync" -item_count = 1000 -root = logging.getLogger() -# root.setLevel(logging.DEBUG) -# logging.getLogger("urllib3.connectionpool").setLevel(logging.DEBUG) - -handler = logging.StreamHandler(sys.stdout) -handler.setLevel(logging.DEBUG) -formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") -handler.setFormatter(formatter) -root.addHandler(handler) - - -def index_records(client: Any, item_count: int) -> Any: +def index_records(client: Any, index_name: str, item_count: int) -> Any: tt = 0 for n in range(10): data: Any = [] @@ -65,6 +49,23 @@ def index_records(client: Any, item_count: int) -> Any: def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> None: + host = "localhost" + port = 9200 + auth = ("admin", "admin") + index_name = "test-index-sync" + + root = logging.getLogger() + # 
root.setLevel(logging.DEBUG) + # logging.getLogger("urllib3.connectionpool").setLevel(logging.DEBUG) + + handler = logging.StreamHandler(sys.stdout) + handler.setLevel(logging.DEBUG) + formatter = logging.Formatter( + "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + ) + handler.setFormatter(formatter) + root.addHandler(handler) + clients = [] for i in range(client_count): clients.append( @@ -96,7 +97,8 @@ def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> N threads = [] for thread_id in range(thread_count): thread = ThreadWithReturnValue( - target=index_records, args=[clients[thread_id % len(clients)], item_count] + target=index_records, + args=[clients[thread_id % len(clients)], index_name, item_count], ) threads.append(thread) thread.start() @@ -113,24 +115,27 @@ def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> N print(f"{count}, latency={latency}") +ITEM_COUNT = 1000 + + def test_1() -> None: - test(1, 32 * item_count, 1) + test(1, 32 * ITEM_COUNT, 1) def test_2() -> None: - test(2, 16 * item_count, 2) + test(2, 16 * ITEM_COUNT, 2) def test_4() -> None: - test(4, 8 * item_count, 3) + test(4, 8 * ITEM_COUNT, 3) def test_8() -> None: - test(8, 4 * item_count, 8) + test(8, 4 * ITEM_COUNT, 8) def test_32() -> None: - test(32, item_count, 32) + test(32, ITEM_COUNT, 32) __benchmarks__ = [(test_1, test_32, "1 thread vs. 32 threads (sync)")] diff --git a/benchmarks/poetry.lock b/benchmarks/poetry.lock index a598d001..d4c2e4a8 100644 --- a/benchmarks/poetry.lock +++ b/benchmarks/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. [[package]] name = "aiohttp" @@ -183,101 +183,101 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.3.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. 
Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, - {file = 
"charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, - {file = 
"charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, - {file = 
"charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, - {file = 
"charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, - {file = 
"charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, - {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + 
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + 
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = 
"charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] @@ -528,12 +528,12 @@ certifi = ">=2022.12.07" python-dateutil = "*" requests = ">=2.4.0,<3.0.0" six = "*" -urllib3 = ">=1.26.9" +urllib3 = ">=1.26.18" [package.extras] async = ["aiohttp (>=3,<4)"] -develop = ["black", "botocore", "coverage (<7.0.0)", "jinja2", "mock", "myst_parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] -docs = ["myst_parser", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] +develop = ["black", "botocore", "coverage (<8.0.0)", "jinja2", "mock", "myst_parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] +docs = ["aiohttp (>=3,<4)", "myst_parser", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] kerberos = ["requests_kerberos"] [package.source] @@ -556,71 +556,71 @@ plugins = ["importlib-metadata"] [[package]] name = "pyinstrument" -version = "4.6.0" +version = "4.6.1" description = "Call stack profiler for Python. Shows you why your code is slow!" 
optional = false python-versions = ">=3.7" files = [ - {file = "pyinstrument-4.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:679b5397e3e6c0d6f56df50ba8c683543df4f1f7c1df2e2eb728e275bde2c85b"}, - {file = "pyinstrument-4.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:18479ffa0c922695ba2befab29521b62bfe75debef48d818cea46262cee48a1e"}, - {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daba103955d0d0b37b8bc20a4e8cc6477e839ce5984478fcf3f7cee8318e9636"}, - {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d93451e9c7650629b0bc12caa7390f81d1a15835c07f7dc170e953d4684ed1e7"}, - {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01009a7b58a6f11bf5560c23848ea2881acac974b0841fe5d365ef154baabd6f"}, - {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:288ea44da6333dacc77b4ba2149dba3dc1e9fbbebd3d5dc51a66c20839d80ef3"}, - {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecc106213146dd90659a1483047b3a1c2e174fb190c0e109234e524a4651e377"}, - {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5cd8ab30c8dcd1511e9b3b98f601f17f2c5c9df1d28f8298d215c63d68919bdc"}, - {file = "pyinstrument-4.6.0-cp310-cp310-win32.whl", hash = "sha256:40e3656e6ace5a140880bd980a25f6a356c094c36e28ed1bf935d7349a78b1b6"}, - {file = "pyinstrument-4.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:d9623fc3fde47ae90ad5014737e37034b4abc3fbfb455b7b56cc095f9037d5af"}, - {file = "pyinstrument-4.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:beaaa3b647b3a4cbd34b71eacaa31e3eb90e1bf53e15ada3ac7e9df09d737239"}, - {file = "pyinstrument-4.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0c69ab570609ac93b5f4ab2e5ccbf8add4f69a962b06307eea66ba65b5ad9d38"}, - {file = 
"pyinstrument-4.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5992748a74ec7ff445e4b56b5e316673c34b6cdbd3755111f7c023d8a141f001"}, - {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb1ba76c4e912cae159ab9729c7b31bb6d7fe8ed1f0fafce74484a4bb159c240"}, - {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:674868ebc3663b01d7d059a6f5cdeff6f18b49e217617720a5d645a6b55ead03"}, - {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:514a0ced357ff400988f599b0294d05e3b68468f9ab876f204bf12765f7fdb1b"}, - {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ccd1f5b4ad35c734dcf2d08d80b5b37205b4e84aa71fe76f95e43bd30c5eef9"}, - {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:611c6cd33f42f19e46d99eeef3b84a47d33fe34cdb0ce6e3635d2ee5038706a3"}, - {file = "pyinstrument-4.6.0-cp311-cp311-win32.whl", hash = "sha256:d20b5cf79bca1b3d425a7362457621741393b1d5ce2d920583541b947bc8a368"}, - {file = "pyinstrument-4.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:ecd8cf03b04dc1b7f151896228993c6aa0fa897cdd517ea127465bc1c826c5b5"}, - {file = "pyinstrument-4.6.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3d4bed520c0f689a75bca4951f6b7fbad96851e8461086c98e03eb726f8a412a"}, - {file = "pyinstrument-4.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b74745f1d22133da8d4a38dd0c78c02c00154a5b7683bdd5df56a7c7705a979b"}, - {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6ab698400e8401597e39c4816efa247f2b98c9b4e59e3ec25d534ae6887bd93"}, - {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:de1a36a083b324dafe5e2880e5e04267a1983beb027f12c3dc361ddbe3acf9af"}, - {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8adc4f87d4289c1f04f19451b5133b8e307bd9b08c364c48e007ba663fefbf1b"}, - {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:08fbc75d3615be6259b7af0c173c7bc48acb6e7bd758678d54eb411ba2903052"}, - {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d86fea6ce117bcff642e24208eb573c00d78b4c2934eb9bd5f915751980cc9bd"}, - {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23a3b21373e0c8bf0d00dda79989fcab0bb1d30094f7b210d40d2226fe20e141"}, - {file = "pyinstrument-4.6.0-cp312-cp312-win32.whl", hash = "sha256:a498c82d93621c5cf736e4660142ac0c3bbcb7b059bcbd4278a6364037128656"}, - {file = "pyinstrument-4.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:9116154446b9999f6524e9db29310aee6476a5a471c276928f2b46b6655a2dcc"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:704c6d38abef8fca2e1085756c9574ea180f7ac866aab6943b483152c2828c2a"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbebdc11d4fc6f3123c046d84db88c7f605d53247e3f357314d0c5775d1beaf4"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c7a7bae4cce5f8d084153857cedbce29ca8274c9924884d0461a5db48619c5d"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03289b10715e261a5c33b267d0a430d1b408f929922fde0a9fd311835c60351b"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7f83544ff9abfacdf64b39498ca3dcd454956e44aedb5f67626b7212291c9160"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:40640f02fe7865540e8a1e51bf7f9d2403e3364c3b7edfdb9dae5eb5596811da"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f3719464888d7303e1081996bc56ab75ef5cdf7ef69ccbb7b29f48eb37d8f8b9"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-win32.whl", hash = "sha256:46e16de6bd3b74ef01b6457d862fee751515315edb5e9283205e45299a29ac49"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9ded87ae11cb0a95a767c817908833ec0821fe0e81650968b201a031edf4bc15"}, - {file = "pyinstrument-4.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8bf16e459a868d9dbaacff4f0a0acd6ad78ce36f2aceabf21e9fd0c3b6aca0d4"}, - {file = "pyinstrument-4.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cb83e445795431c3d867b298c0583ee27717bbc50e5120a4c98575c979ab3ab8"}, - {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29072b1be183e173d7b0f12caf29f8717d273afbf34df950f5fa0d98127cd3fb"}, - {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09502af2a383c59e5a0d3bebfab7e5845f79122348358e9e52b2b0187db84a44"}, - {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a23c982eb9c4d2f8fe553dacb9bdc0991170a0998b94c84f75c2a052e8af4c74"}, - {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f7a38ef482f2151393e729c5582191e4ab05f0ed1fa56b16c2377ff3129107af"}, - {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e983e16c2fdfb752387133380859c3414e119e41c14f39f5f869f29dcf6e995c"}, - {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d00c87e5cea48a562d67f0436999463b7989cff2e4c196b0e8ba06d515f191a9"}, - {file = "pyinstrument-4.6.0-cp38-cp38-win32.whl", hash = "sha256:a24c95cabf2ca5d79b62dbc8ff17749768b8aafd777841352f59f4ffd6688782"}, - {file = 
"pyinstrument-4.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f3d88b66dbbcdc6e4c57bd8574ad9d096cd23285eee0f4a5cf74f0e0df6aa190"}, - {file = "pyinstrument-4.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2bcfec45cdbb9edf6d5853debac4a792de589e621be07a71dc76acb36e144a3a"}, - {file = "pyinstrument-4.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e790515a22844bbccaa388c7715b037c45a8d0155c4a6f2990659998a8920501"}, - {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93a30e0d93633a28d4adcf7d7e2d158d6331809b95c2c4a155da17ea1e43eaa3"}, - {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa554eb8ef1c54849dbf480965b073f39b39b517e466ce241808a00398f9742a"}, - {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e289898c644cbbb61d931bbcb6505e2a279ad1122612c9098bfb0958ebf5764"}, - {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20ce0f1612a019888a6b94fa7f1e7862842f0b5219282e3354d5b35aceb363f6"}, - {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4935f3cdb9062fceac65c50de76f07e05cf630bd3a9c663fedc9e88b5efe7d7c"}, - {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dc9c4577ef4b06ae1592c920d0a4f0f0db587a16f530c629ad93e125bc79ebb7"}, - {file = "pyinstrument-4.6.0-cp39-cp39-win32.whl", hash = "sha256:3ec6b04d8cfb34aec48de7fa77aeb919e8e7e19909740ab7a5553339f6f4c53a"}, - {file = "pyinstrument-4.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a6d2e5c15f989629fac41536ec2ca1fe81359fadf4dadf2ff24fe96b389f6df"}, - {file = "pyinstrument-4.6.0.tar.gz", hash = "sha256:3e509e879c853dbc5fdc1757f0cfdbf8bee899c80f53d504a7df28898f0fa8ed"}, + {file = "pyinstrument-4.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:73476e4bc6e467ac1b2c3c0dd1f0b71c9061d4de14626676adfdfbb14aa342b4"}, + {file = "pyinstrument-4.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4d1da8efd974cf9df52ee03edaee2d3875105ddd00de35aa542760f7c612bdf7"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507be1ee2f2b0c9fba74d622a272640dd6d1b0c9ec3388b2cdeb97ad1e77125f"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95cee6de08eb45754ef4f602ce52b640d1c535d934a6a8733a974daa095def37"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7873e8cec92321251fdf894a72b3c78f4c5c20afdd1fef0baf9042ec843bb04"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a242f6cac40bc83e1f3002b6b53681846dfba007f366971db0bf21e02dbb1903"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:97c9660cdb4bd2a43cf4f3ab52cffd22f3ac9a748d913b750178fb34e5e39e64"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e304cd0723e2b18ada5e63c187abf6d777949454c734f5974d64a0865859f0f4"}, + {file = "pyinstrument-4.6.1-cp310-cp310-win32.whl", hash = "sha256:cee21a2d78187dd8a80f72f5d0f1ddb767b2d9800f8bb4d94b6d11f217c22cdb"}, + {file = "pyinstrument-4.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:2000712f71d693fed2f8a1c1638d37b7919124f367b37976d07128d49f1445eb"}, + {file = "pyinstrument-4.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a366c6f3dfb11f1739bdc1dee75a01c1563ad0bf4047071e5e77598087df457f"}, + {file = "pyinstrument-4.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6be327be65d934796558aa9cb0f75ce62ebd207d49ad1854610c97b0579ad47"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9e160d9c5d20d3e4ef82269e4e8b246ff09bdf37af5fb8cb8ccca97936d95ad6"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ffbf56605ef21c2fcb60de2fa74ff81f417d8be0c5002a407e414d6ef6dee43"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c92cc4924596d6e8f30a16182bbe90893b1572d847ae12652f72b34a9a17c24a"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f4b48a94d938cae981f6948d9ec603bab2087b178d2095d042d5a48aabaecaab"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7a386392275bdef4a1849712dc5b74f0023483fca14ef93d0ca27d453548982"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:871b131b83e9b1122f2325061c68ed1e861eebcb568c934d2fb193652f077f77"}, + {file = "pyinstrument-4.6.1-cp311-cp311-win32.whl", hash = "sha256:8d8515156dd91f5652d13b5fcc87e634f8fe1c07b68d1d0840348cdd50bf5ace"}, + {file = "pyinstrument-4.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb868fbe089036e9f32525a249f4c78b8dc46967612393f204b8234f439c9cc4"}, + {file = "pyinstrument-4.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a18cd234cce4f230f1733807f17a134e64a1f1acabf74a14d27f583cf2b183df"}, + {file = "pyinstrument-4.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:574cfca69150be4ce4461fb224712fbc0722a49b0dc02fa204d02807adf6b5a0"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e02cf505e932eb8ccf561b7527550a67ec14fcae1fe0e25319b09c9c166e914"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832fb2acef9d53701c1ab546564c45fb70a8770c816374f8dd11420d399103c9"}, + {file = 
"pyinstrument-4.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13cb57e9607545623ebe462345b3d0c4caee0125d2d02267043ece8aca8f4ea0"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9be89e7419bcfe8dd6abb0d959d6d9c439c613a4a873514c43d16b48dae697c9"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:476785cfbc44e8e1b1ad447398aa3deae81a8df4d37eb2d8bbb0c404eff979cd"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e9cebd90128a3d2fee36d3ccb665c1b9dce75261061b2046203e45c4a8012d54"}, + {file = "pyinstrument-4.6.1-cp312-cp312-win32.whl", hash = "sha256:1d0b76683df2ad5c40eff73607dc5c13828c92fbca36aff1ddf869a3c5a55fa6"}, + {file = "pyinstrument-4.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:c4b7af1d9d6a523cfbfedebcb69202242d5bd0cb89c4e094cc73d5d6e38279bd"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:79ae152f8c6a680a188fb3be5e0f360ac05db5bbf410169a6c40851dfaebcce9"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07cad2745964c174c65aa75f1bf68a4394d1b4d28f33894837cfd315d1e836f0"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb81f66f7f94045d723069cf317453d42375de9ff3c69089cf6466b078ac1db4"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ab30ae75969da99e9a529e21ff497c18fdf958e822753db4ae7ed1e67094040"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f36cb5b644762fb3c86289324bbef17e95f91cd710603ac19444a47f638e8e96"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8b45075d9dbbc977dbc7007fb22bb0054c6990fbe91bf48dd80c0b96c6307ba7"}, + {file = 
"pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:475ac31477f6302e092463896d6a2055f3e6abcd293bad16ff94fc9185308a88"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-win32.whl", hash = "sha256:29172ab3d8609fdf821c3f2562dc61e14f1a8ff5306607c32ca743582d3a760e"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:bd176f297c99035127b264369d2bb97a65255f65f8d4e843836baf55ebb3cee4"}, + {file = "pyinstrument-4.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:23e9b4526978432e9999021da9a545992cf2ac3df5ee82db7beb6908fc4c978c"}, + {file = "pyinstrument-4.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2dbcaccc9f456ef95557ec501caeb292119c24446d768cb4fb43578b0f3d572c"}, + {file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2097f63c66c2bc9678c826b9ff0c25acde3ed455590d9dcac21220673fe74fbf"}, + {file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:205ac2e76bd65d61b9611a9ce03d5f6393e34ec5b41dd38808f25d54e6b3e067"}, + {file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f414ddf1161976a40fc0a333000e6a4ad612719eac0b8c9bb73f47153187148"}, + {file = "pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:65e62ebfa2cd8fb57eda90006f4505ac4c70da00fc2f05b6d8337d776ea76d41"}, + {file = "pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d96309df4df10be7b4885797c5f69bb3a89414680ebaec0722d8156fde5268c3"}, + {file = "pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f3d1ad3bc8ebb4db925afa706aa865c4bfb40d52509f143491ac0df2440ee5d2"}, + {file = "pyinstrument-4.6.1-cp38-cp38-win32.whl", hash = "sha256:dc37cb988c8854eb42bda2e438aaf553536566657d157c4473cc8aad5692a779"}, + {file = "pyinstrument-4.6.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:2cd4ce750c34a0318fc2d6c727cc255e9658d12a5cf3f2d0473f1c27157bdaeb"}, + {file = "pyinstrument-4.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6ca95b21f022e995e062b371d1f42d901452bcbedd2c02f036de677119503355"}, + {file = "pyinstrument-4.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ac1e1d7e1f1b64054c4eb04eb4869a7a5eef2261440e73943cc1b1bc3c828c18"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0711845e953fce6ab781221aacffa2a66dbc3289f8343e5babd7b2ea34da6c90"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b7d28582017de35cb64eb4e4fa603e753095108ca03745f5d17295970ee631f"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7be57db08bd366a37db3aa3a6187941ee21196e8b14975db337ddc7d1490649d"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9a0ac0f56860398d2628ce389826ce83fb3a557d0c9a2351e8a2eac6eb869983"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a9045186ff13bc826fef16be53736a85029aae3c6adfe52e666cad00d7ca623b"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6c4c56b6eab9004e92ad8a48bb54913fdd71fc8a748ae42a27b9e26041646f8b"}, + {file = "pyinstrument-4.6.1-cp39-cp39-win32.whl", hash = "sha256:37e989c44b51839d0c97466fa2b623638b9470d56d79e329f359f0e8fa6d83db"}, + {file = "pyinstrument-4.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:5494c5a84fee4309d7d973366ca6b8b9f8ba1d6b254e93b7c506264ef74f2cef"}, + {file = "pyinstrument-4.6.1.tar.gz", hash = "sha256:f4731b27121350f5a983d358d2272fe3df2f538aed058f57217eef7801a89288"}, ] [package.extras] @@ -667,13 +667,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.6.0" +version = "13.7.0" description = "Render rich text, 
tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, - {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, + {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, + {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, ] [package.dependencies] @@ -723,13 +723,13 @@ files = [ [[package]] name = "urllib3" -version = "2.0.6" +version = "2.0.7" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"}, - {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"}, + {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, + {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, ] [package.extras] diff --git a/benchmarks/pyproject.toml b/benchmarks/pyproject.toml index c0c82142..0c4019db 100644 --- a/benchmarks/pyproject.toml +++ b/benchmarks/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "package" +name = "opensearch-py-benchmarks" version = "0.1.0" description = "OpenSearch Python client benchmarks." authors = ["Daniel Doubrovkine "] diff --git a/guides/async.md b/guides/async.md index 5de0971f..a3b4be66 100644 --- a/guides/async.md +++ b/guides/async.md @@ -11,7 +11,7 @@ # Asynchronous I/O -This client supports asynchronous I/O that improves performance and increases throughput. 
See [hello-async.py](../samples/hello/hello-async.py) or [knn-async-basics.py](../samples/knn/knn-async-basics.py) for a working asynchronous sample. +This client supports asynchronous I/O that improves performance and increases throughput. See [hello_async.py](../samples/hello/hello_async.py) or [knn_async_basics.py](../samples/knn/knn_async_basics.py) for a working asynchronous sample. ## Setup diff --git a/guides/bulk.md b/guides/bulk.md index 251be4f8..52057efd 100644 --- a/guides/bulk.md +++ b/guides/bulk.md @@ -10,7 +10,7 @@ The [Bulk API](https://opensearch.org/docs/latest/api-reference/document-apis/bu ## Line-Delimited JSON -The `bulk` API accepts line-delimited JSON. This method requires the caller to evaluate the return value and parse errors in the case of a failure or partial success. See [samples/bulk/bulk-ld.py](../samples/bulk/bulk-ld.py) for a working sample. +The `bulk` API accepts line-delimited JSON. This method requires the caller to evaluate the return value and parse errors in the case of a failure or partial success. See [samples/bulk/bulk_ld.py](../samples/bulk/bulk_ld.py) for a working sample. ```python from opensearchpy import OpenSearch @@ -33,7 +33,7 @@ else: print(f"Bulk-inserted {len(rc['items'])} items.") ``` -The client can also serialize an array of data into bulk-delimited JSON for you. See [samples/bulk/bulk-array.py](../samples/bulk/bulk-array.py) for a working sample. +The client can also serialize an array of data into bulk-delimited JSON for you. See [samples/bulk/bulk_array.py](../samples/bulk/bulk_array.py) for a working sample. ```python data = [ @@ -56,7 +56,7 @@ else: ## Bulk Helper -A helper can generate the line-delimited JSON for you from a Python array that contains `_index` and `_id` fields, and parse errors. The `helpers.bulk` implementation will raise `BulkIndexError` if any error occurs. This may indicate a partially successful result. 
See [samples/bulk/bulk-helpers.py](../samples/bulk/bulk-helpers.py) for a working sample. +A helper can generate the line-delimited JSON for you from a Python array that contains `_index` and `_id` fields, and parse errors. The `helpers.bulk` implementation will raise `BulkIndexError` if any error occurs. This may indicate a partially successful result. See [samples/bulk/bulk_helpers.py](../samples/bulk/bulk_helpers.py) for a working sample. ```python from opensearchpy import OpenSearch, helpers diff --git a/guides/plugins/knn.md b/guides/plugins/knn.md index a7775c88..8eea69c6 100644 --- a/guides/plugins/knn.md +++ b/guides/plugins/knn.md @@ -3,7 +3,7 @@ - [Create an Index](#create-an-index) - [Index Vectors](#index-vectors) - [Search for Nearest Neighbors](#search-for-nearest-neighbors) - - [Approximate k-NN with a Boolean Filter](#approximate-k-nn-with-a-boolean-filter) + - [Approximate k-NN with a Boolean Filter](#approximate-k-nn-with-a-boolean-filter) - [Approximate k-NN with an Efficient Filter](#approximate-k-nn-with-an-efficient-filter) # k-NN Plugin @@ -12,10 +12,10 @@ Short for k-nearest neighbors, the k-NN plugin enables users to search for the k ## Basic Approximate k-NN -In the following example we create a 5-dimensional k-NN index with random data. You can find a synchronous version of this working sample in [samples/knn/knn-basics.py](../../samples/knn/knn-basics.py) and an asynchronous one in [samples/knn/knn-async-basics.py](../../samples/knn/knn-async-basics.py). +In the following example we create a 5-dimensional k-NN index with random data. You can find a synchronous version of this working sample in [samples/knn/knn_basics.py](../../samples/knn/knn_basics.py) and an asynchronous one in [samples/knn/knn_async_basics.py](../../samples/knn/knn_async_basics.py). ```bash -$ poetry run python knn/knn-basics.py +$ poetry run python knn/knn_basics.py Searching for [0.61, 0.05, 0.16, 0.75, 0.49] ...
{'_index': 'my-index', '_id': '3', '_score': 0.9252405, '_source': {'values': [0.64, 0.3, 0.27, 0.68, 0.51]}} @@ -93,10 +93,10 @@ for hit in results["hits"]["hits"]: ## Approximate k-NN with a Boolean Filter -In [the boolean-filter.py sample](../../samples/knn/knn-boolean-filter.py) we create a 5-dimensional k-NN index with random data and a `metadata` field that contains a book genre (e.g. `fiction`). The search query is a k-NN search filtered by genre. The filter clause is outside the k-NN query clause and is applied after the k-NN search. +In [the knn_boolean_filter.py sample](../../samples/knn/knn_boolean_filter.py) we create a 5-dimensional k-NN index with random data and a `metadata` field that contains a book genre (e.g. `fiction`). The search query is a k-NN search filtered by genre. The filter clause is outside the k-NN query clause and is applied after the k-NN search. ```bash -$ poetry run python knn/knn-boolean-filter.py +$ poetry run python knn/knn_boolean_filter.py Searching for [0.08, 0.42, 0.04, 0.76, 0.41] with the 'romance' genre ... @@ -106,10 +106,10 @@ Searching for [0.08, 0.42, 0.04, 0.76, 0.41] with the 'romance' genre ... ## Approximate k-NN with an Efficient Filter -In [the lucene-filter.py sample](../../samples/knn/knn-efficient-filter.py) we implement the example in [the k-NN documentation](https://opensearch.org/docs/latest/search-plugins/knn/filter-search-knn/), which creates an index that uses the Lucene engine and HNSW as the method in the mapping, containing hotel location and parking data, then search for the top three hotels near the location with the coordinates `[5, 4]` that are rated between 8 and 10, inclusive, and provide parking. 
+In [the knn_efficient_filter.py sample](../../samples/knn/knn_efficient_filter.py) we implement the example in [the k-NN documentation](https://opensearch.org/docs/latest/search-plugins/knn/filter-search-knn/), which creates an index that uses the Lucene engine and HNSW as the method in the mapping, containing hotel location and parking data, then search for the top three hotels near the location with the coordinates `[5, 4]` that are rated between 8 and 10, inclusive, and provide parking. ```bash -$ poetry run python knn/knn-efficient-filter.py +$ poetry run python knn/knn_efficient_filter.py {'_index': 'hotels-index', '_id': '3', '_score': 0.72992706, '_source': {'location': [4.9, 3.4], 'parking': 'true', 'rating': 9}} {'_index': 'hotels-index', '_id': '6', '_score': 0.3012048, '_source': {'location': [6.4, 3.4], 'parking': 'true', 'rating': 9}} diff --git a/noxfile.py b/noxfile.py index 325b7aa8..be71ac0b 100644 --- a/noxfile.py +++ b/noxfile.py @@ -65,7 +65,7 @@ def format(session: Any) -> None: session.run("isort", *SOURCE_FILES) session.run("black", *SOURCE_FILES) - session.run("python", "utils/license-headers.py", "fix", *SOURCE_FILES) + session.run("python", "utils/license_headers.py", "fix", *SOURCE_FILES) lint(session) @@ -91,7 +91,7 @@ def lint(session: Any) -> None: session.run("black", "--check", *SOURCE_FILES) session.run("flake8", *SOURCE_FILES) session.run("pylint", *SOURCE_FILES) - session.run("python", "utils/license-headers.py", "check", *SOURCE_FILES) + session.run("python", "utils/license_headers.py", "check", *SOURCE_FILES) # Workaround to make '-r' to still work despite uninstalling aiohttp below. 
session.run("python", "-m", "pip", "install", "aiohttp") @@ -120,5 +120,5 @@ def docs(session: Any) -> None: @nox.session() # type: ignore def generate(session: Any) -> None: session.install("-rdev-requirements.txt") - session.run("python", "utils/generate-api.py") + session.run("python", "utils/generate_api.py") format(session) diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index fa8b5f04..5d91ba6e 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/_async/client/cat.py b/opensearchpy/_async/client/cat.py index 4310511c..b0eb0cd1 100644 --- a/opensearchpy/_async/client/cat.py +++ b/opensearchpy/_async/client/cat.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
# # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/_async/client/cluster.py b/opensearchpy/_async/client/cluster.py index 905853e9..06346102 100644 --- a/opensearchpy/_async/client/cluster.py +++ b/opensearchpy/_async/client/cluster.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/_async/client/dangling_indices.py b/opensearchpy/_async/client/dangling_indices.py index 6bc9a343..0f5752ee 100644 --- a/opensearchpy/_async/client/dangling_indices.py +++ b/opensearchpy/_async/client/dangling_indices.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
# # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/_async/client/indices.py b/opensearchpy/_async/client/indices.py index a4ef8b5b..c0361364 100644 --- a/opensearchpy/_async/client/indices.py +++ b/opensearchpy/_async/client/indices.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/_async/client/ingest.py b/opensearchpy/_async/client/ingest.py index 2f8cff27..240a466b 100644 --- a/opensearchpy/_async/client/ingest.py +++ b/opensearchpy/_async/client/ingest.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
# # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/_async/client/nodes.py b/opensearchpy/_async/client/nodes.py index 36146fad..f1bc45d5 100644 --- a/opensearchpy/_async/client/nodes.py +++ b/opensearchpy/_async/client/nodes.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/_async/client/remote_store.py b/opensearchpy/_async/client/remote_store.py index 8a72f41c..9fff40d5 100644 --- a/opensearchpy/_async/client/remote_store.py +++ b/opensearchpy/_async/client/remote_store.py @@ -12,7 +12,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
# # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/_async/client/security.py b/opensearchpy/_async/client/security.py index dc893f86..01545654 100644 --- a/opensearchpy/_async/client/security.py +++ b/opensearchpy/_async/client/security.py @@ -12,7 +12,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/_async/client/snapshot.py b/opensearchpy/_async/client/snapshot.py index 97ffec72..d5b08814 100644 --- a/opensearchpy/_async/client/snapshot.py +++ b/opensearchpy/_async/client/snapshot.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
# # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/_async/client/tasks.py b/opensearchpy/_async/client/tasks.py index 39aefe93..3a9c02fc 100644 --- a/opensearchpy/_async/client/tasks.py +++ b/opensearchpy/_async/client/tasks.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/_async/helpers/index.py b/opensearchpy/_async/helpers/index.py index 4f2a9918..42d63dfd 100644 --- a/opensearchpy/_async/helpers/index.py +++ b/opensearchpy/_async/helpers/index.py @@ -260,7 +260,7 @@ def search(self, using: Any = None) -> Any: using=using or self._using, index=self._name, doc_type=self._doc_types ) - def updateByQuery(self, using: Any = None) -> Any: + def updateByQuery(self, using: Any = None) -> Any: # pylint: disable=invalid-name """ Return a :class:`~opensearchpy.AsyncUpdateByQuery` object searching over the index (or all the indices belonging to this template) and updating Documents that match diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py 
index a9c71552..95d652b6 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/client/cat.py b/opensearchpy/client/cat.py index 91adbf35..0b98f9c3 100644 --- a/opensearchpy/client/cat.py +++ b/opensearchpy/client/cat.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/client/cluster.py b/opensearchpy/client/cluster.py index f2770f2d..1d6d7bd0 100644 --- a/opensearchpy/client/cluster.py +++ b/opensearchpy/client/cluster.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
# # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/client/dangling_indices.py b/opensearchpy/client/dangling_indices.py index 8617708e..1624434b 100644 --- a/opensearchpy/client/dangling_indices.py +++ b/opensearchpy/client/dangling_indices.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/client/indices.py b/opensearchpy/client/indices.py index 7cdc7e57..3c885298 100644 --- a/opensearchpy/client/indices.py +++ b/opensearchpy/client/indices.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
# # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/client/ingest.py b/opensearchpy/client/ingest.py index 4bf558b9..bd5ad3c3 100644 --- a/opensearchpy/client/ingest.py +++ b/opensearchpy/client/ingest.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/client/nodes.py b/opensearchpy/client/nodes.py index 6a7b5db1..8aeafb81 100644 --- a/opensearchpy/client/nodes.py +++ b/opensearchpy/client/nodes.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
# # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/client/remote_store.py b/opensearchpy/client/remote_store.py index a019a99c..e6e0a4c1 100644 --- a/opensearchpy/client/remote_store.py +++ b/opensearchpy/client/remote_store.py @@ -12,7 +12,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/client/security.py b/opensearchpy/client/security.py index 6d1574ea..3f8e6237 100644 --- a/opensearchpy/client/security.py +++ b/opensearchpy/client/security.py @@ -12,7 +12,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
# # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/client/snapshot.py b/opensearchpy/client/snapshot.py index fe6536fa..2482d038 100644 --- a/opensearchpy/client/snapshot.py +++ b/opensearchpy/client/snapshot.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. # # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/client/tasks.py b/opensearchpy/client/tasks.py index 7e675233..72d0bddf 100644 --- a/opensearchpy/client/tasks.py +++ b/opensearchpy/client/tasks.py @@ -30,7 +30,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
# # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/opensearchpy/compat.py b/opensearchpy/compat.py index cb8bc7d7..ca874943 100644 --- a/opensearchpy/compat.py +++ b/opensearchpy/compat.py @@ -32,7 +32,7 @@ from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse string_types = str, bytes -map = map +map = map # pylint: disable=invalid-name def to_str(x: Union[str, bytes], encoding: str = "ascii") -> str: diff --git a/opensearchpy/connection/base.py b/opensearchpy/connection/base.py index a2774c15..eda6adfe 100644 --- a/opensearchpy/connection/base.py +++ b/opensearchpy/connection/base.py @@ -46,9 +46,9 @@ # create the opensearchpy.trace logger, but only set propagate to False if the # logger hasn't already been configured -_tracer_already_configured = "opensearchpy.trace" in logging.Logger.manager.loggerDict +TRACER_ALREADY_CONFIGURED = "opensearchpy.trace" in logging.Logger.manager.loggerDict tracer = logging.getLogger("opensearchpy.trace") -if not _tracer_already_configured: +if not TRACER_ALREADY_CONFIGURED: tracer.propagate = False _WARNING_RE = re.compile(r"\"([^\"]*)\"") diff --git a/opensearchpy/helpers/aggs.py b/opensearchpy/helpers/aggs.py index 59795614..4e06e7d9 100644 --- a/opensearchpy/helpers/aggs.py +++ b/opensearchpy/helpers/aggs.py @@ -33,7 +33,9 @@ from .utils import DslBase -def A(name_or_agg: Any, filter: Any = None, **params: Any) -> Any: +def A( # pylint: disable=invalid-name + name_or_agg: Any, filter: Any = None, **params: Any +) -> Any: if filter is not None: if name_or_agg 
!= "filter": raise ValueError( diff --git a/opensearchpy/helpers/function.py b/opensearchpy/helpers/function.py index 00452f86..f0885aa5 100644 --- a/opensearchpy/helpers/function.py +++ b/opensearchpy/helpers/function.py @@ -31,7 +31,7 @@ from .utils import DslBase -def SF(name_or_sf: Any, **params: Any) -> Any: +def SF(name_or_sf: Any, **params: Any) -> Any: # pylint: disable=invalid-name # {"script_score": {"script": "_score"}, "filter": {}} if isinstance(name_or_sf, collections_abc.Mapping): if params: diff --git a/opensearchpy/helpers/index.py b/opensearchpy/helpers/index.py index 3fbb475a..3b6185b4 100644 --- a/opensearchpy/helpers/index.py +++ b/opensearchpy/helpers/index.py @@ -279,7 +279,9 @@ def search(self, using: Optional[OpenSearch] = None) -> Search: using=using or self._using, index=self._name, doc_type=self._doc_types ) - def updateByQuery(self, using: Optional[OpenSearch] = None) -> UpdateByQuery: + def updateByQuery( # pylint: disable=invalid-name + self, using: Optional[OpenSearch] = None + ) -> UpdateByQuery: """ Return a :class:`~opensearchpy.UpdateByQuery` object searching over the index (or all the indices belonging to this template) and updating Documents that match diff --git a/opensearchpy/helpers/query.py b/opensearchpy/helpers/query.py index e299f94a..b7861f78 100644 --- a/opensearchpy/helpers/query.py +++ b/opensearchpy/helpers/query.py @@ -35,7 +35,9 @@ from .utils import DslBase -def Q(name_or_query: Any = "match_all", **params: Any) -> Any: +def Q( # pylint: disable=invalid-name + name_or_query: Any = "match_all", **params: Any +) -> Any: # {"match": {"title": "python"}} if isinstance(name_or_query, collections_abc.Mapping): if params: diff --git a/samples/advanced_index_actions/advanced_index_actions_sample.py b/samples/advanced_index_actions/advanced_index_actions_sample.py index 562f82e2..a8eb3859 100644 --- a/samples/advanced_index_actions/advanced_index_actions_sample.py +++ 
b/samples/advanced_index_actions/advanced_index_actions_sample.py @@ -18,7 +18,7 @@ # urllib3.disable_warnings() -def test_opensearch_examples() -> None: +def main() -> None: # Set up client = OpenSearch( hosts=["https://localhost:9200"], @@ -101,4 +101,4 @@ def test_opensearch_examples() -> None: if __name__ == "__main__": - test_opensearch_examples() + main() diff --git a/samples/aws/README.md b/samples/aws/README.md index 17ad4ee0..bdb30c2b 100644 --- a/samples/aws/README.md +++ b/samples/aws/README.md @@ -11,7 +11,7 @@ export AWS_REGION=us-west-2 export SERVICE=es # use "aoss" for OpenSearch Serverless. export ENDPOINT=https://....us-west-2.es.amazonaws.com -poetry run aws/search-urllib.py +poetry run aws/search_urllib.py ``` This will output the version of OpenSearch and a search result. diff --git a/samples/aws/search-requests.py b/samples/aws/search-requests.py deleted file mode 100644 index 0af366f0..00000000 --- a/samples/aws/search-requests.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. 
- -import logging -from os import environ -from time import sleep -from urllib.parse import urlparse - -from boto3 import Session - -from opensearchpy import OpenSearch, RequestsAWSV4SignerAuth, RequestsHttpConnection - -# verbose logging -logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) - -# cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com -url = urlparse(environ["ENDPOINT"]) -region = environ.get("AWS_REGION", "us-east-1") -service = environ.get("SERVICE", "es") - -credentials = Session().get_credentials() - -auth = RequestsAWSV4SignerAuth(credentials, region, service) - -client = OpenSearch( - hosts=[{"host": url.netloc, "port": url.port or 443}], - http_auth=auth, - use_ssl=True, - verify_certs=True, - connection_class=RequestsHttpConnection, - timeout=30, -) - -# TODO: remove when OpenSearch Serverless adds support for / -if service == "es": - info = client.info() - print(f"{info['version']['distribution']}: {info['version']['number']}") - -# create an index -index = "movies" -client.indices.create(index=index) - -try: - # index data - document = {"director": "Bennett Miller", "title": "Moneyball", "year": 2011} - client.index(index=index, body=document, id="1") - - # wait for the document to index - sleep(1) - - # search for the document - results = client.search(body={"query": {"match": {"director": "miller"}}}) - for hit in results["hits"]["hits"]: - print(hit["_source"]) - - # delete the document - client.delete(index=index, id="1") -finally: - # delete the index - client.indices.delete(index=index) diff --git a/samples/aws/search-urllib3.py b/samples/aws/search-urllib3.py deleted file mode 100644 index 534caf40..00000000 --- a/samples/aws/search-urllib3.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# 
compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -import logging -from os import environ -from time import sleep -from urllib.parse import urlparse - -from boto3 import Session - -from opensearchpy import OpenSearch, Urllib3AWSV4SignerAuth, Urllib3HttpConnection - -# verbose logging -logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) - -# cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com -url = urlparse(environ["ENDPOINT"]) -region = environ.get("AWS_REGION", "us-east-1") -service = environ.get("SERVICE", "es") - -credentials = Session().get_credentials() - -auth = Urllib3AWSV4SignerAuth(credentials, region, service) - -client = OpenSearch( - hosts=[{"host": url.netloc, "port": url.port or 443}], - http_auth=auth, - use_ssl=True, - verify_certs=True, - connection_class=Urllib3HttpConnection, - timeout=30, -) - -# TODO: remove when OpenSearch Serverless adds support for / -if service == "es": - info = client.info() - print(f"{info['version']['distribution']}: {info['version']['number']}") - -# create an index -index = "movies" -client.indices.create(index=index) - -try: - # index data - document = {"director": "Bennett Miller", "title": "Moneyball", "year": 2011} - client.index(index=index, body=document, id="1") - - # wait for the document to index - sleep(1) - - # search for the document - results = client.search(body={"query": {"match": {"director": "miller"}}}) - for hit in results["hits"]["hits"]: - print(hit["_source"]) - - # delete the document - client.delete(index=index, id="1") -finally: - # delete the index - client.indices.delete(index=index) diff --git a/samples/aws/search_requests.py b/samples/aws/search_requests.py new file mode 100644 index 00000000..84c7f47a --- /dev/null +++ b/samples/aws/search_requests.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The 
OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +import logging +from os import environ +from time import sleep +from urllib.parse import urlparse + +from boto3 import Session + +from opensearchpy import OpenSearch, RequestsAWSV4SignerAuth, RequestsHttpConnection + + +def main() -> None: + # verbose logging + logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) + + # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com + url = urlparse(environ["ENDPOINT"]) + region = environ.get("AWS_REGION", "us-east-1") + service = environ.get("SERVICE", "es") + + credentials = Session().get_credentials() + + auth = RequestsAWSV4SignerAuth(credentials, region, service) + + client = OpenSearch( + hosts=[{"host": url.netloc, "port": url.port or 443}], + http_auth=auth, + use_ssl=True, + verify_certs=True, + connection_class=RequestsHttpConnection, + timeout=30, + ) + + # TODO: remove when OpenSearch Serverless adds support for / + if service == "es": + info = client.info() + print(f"{info['version']['distribution']}: {info['version']['number']}") + + # create an index + index = "movies" + client.indices.create(index=index) + + try: + # index data + document = {"director": "Bennett Miller", "title": "Moneyball", "year": 2011} + client.index(index=index, body=document, id="1") + + # wait for the document to index + sleep(1) + + # search for the document + results = client.search(body={"query": {"match": {"director": "miller"}}}) + for hit in results["hits"]["hits"]: + print(hit["_source"]) + + # delete the document + client.delete(index=index, id="1") + finally: + # delete the index + client.indices.delete(index=index) + + +if __name__ == "__main__": + main() diff --git a/samples/aws/search_urllib3.py b/samples/aws/search_urllib3.py new file mode 100644 
index 00000000..00581683 --- /dev/null +++ b/samples/aws/search_urllib3.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +import logging +from os import environ +from time import sleep +from urllib.parse import urlparse + +from boto3 import Session + +from opensearchpy import OpenSearch, Urllib3AWSV4SignerAuth, Urllib3HttpConnection + + +def main() -> None: + # verbose logging + logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) + + # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com + url = urlparse(environ["ENDPOINT"]) + region = environ.get("AWS_REGION", "us-east-1") + service = environ.get("SERVICE", "es") + + credentials = Session().get_credentials() + + auth = Urllib3AWSV4SignerAuth(credentials, region, service) + + client = OpenSearch( + hosts=[{"host": url.netloc, "port": url.port or 443}], + http_auth=auth, + use_ssl=True, + verify_certs=True, + connection_class=Urllib3HttpConnection, + timeout=30, + ) + + # TODO: remove when OpenSearch Serverless adds support for / + if service == "es": + info = client.info() + print(f"{info['version']['distribution']}: {info['version']['number']}") + + # create an index + index = "movies" + client.indices.create(index=index) + + try: + # index data + document = {"director": "Bennett Miller", "title": "Moneyball", "year": 2011} + client.index(index=index, body=document, id="1") + + # wait for the document to index + sleep(1) + + # search for the document + results = client.search(body={"query": {"match": {"director": "miller"}}}) + for hit in results["hits"]["hits"]: + print(hit["_source"]) + + # delete the document + client.delete(index=index, id="1") + finally: + # 
delete the index + client.indices.delete(index=index) + + +if __name__ == "__main__": + main() diff --git a/samples/bulk/bulk-array.py b/samples/bulk/bulk-array.py deleted file mode 100755 index 5191a291..00000000 --- a/samples/bulk/bulk-array.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - - -import os -from typing import Any - -from opensearchpy import OpenSearch - -# connect to an instance of OpenSearch - -host = os.getenv("HOST", default="localhost") -port = int(os.getenv("PORT", 9200)) -auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) - -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -# check whether an index exists -index_name = "my-index" - -if not client.indices.exists(index_name): - client.indices.create( - index_name, - body={ - "mappings": { - "properties": { - "value": {"type": "float"}, - } - } - }, - ) - -# index data -data: Any = [] -for i in range(100): - data.append({"index": {"_index": index_name, "_id": i}}) - data.append({"value": i}) - -rc = client.bulk(data) -if rc["errors"]: - print("There were errors:") - for item in rc["items"]: - print(f"{item['index']['status']}: {item['index']['error']['type']}") -else: - print(f"Bulk-inserted {len(rc['items'])} items.") - -# delete index -client.indices.delete(index=index_name) diff --git a/samples/bulk/bulk-helpers.py b/samples/bulk/bulk-helpers.py deleted file mode 100755 index 678b2c09..00000000 --- a/samples/bulk/bulk-helpers.py +++ /dev/null @@ -1,106 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The 
OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - - -import os -from typing import Any - -from opensearchpy import OpenSearch, helpers - -# connect to an instance of OpenSearch - -host = os.getenv("HOST", default="localhost") -port = int(os.getenv("PORT", 9200)) -auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) - -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -# check whether an index exists -index_name = "my-index" - -if not client.indices.exists(index_name): - client.indices.create( - index_name, - body={ - "mappings": { - "properties": { - "value": {"type": "float"}, - } - } - }, - ) - -# index data -data = [] -for i in range(100): - data.append({"_index": index_name, "_id": i, "value": i}) - -# serialized bulk raising an exception on error -rc = helpers.bulk(client, data) -print(f"Bulk-inserted {rc[0]} items (bulk).") - -# parallel bulk with explicit error checking -succeeded = [] -failed = [] -for success, item in helpers.parallel_bulk( - client, - actions=data, - chunk_size=10, - raise_on_error=False, - raise_on_exception=False, - max_chunk_bytes=20 * 1024 * 1024, - request_timeout=60, -): - if success: - succeeded.append(item) - else: - failed.append(item) - -if len(failed) > 0: - print(f"There were {len(failed)} errors:") - for item in failed: - print(item["index"]["error"]) - -if len(succeeded) > 0: - print(f"Bulk-inserted {len(succeeded)} items (parallel_bulk).") - - -# streaming bulk with a data generator -def _generate_data() -> Any: - for i in range(100): - yield {"_index": index_name, "_id": i, "value": i} - - -succeeded = [] -failed = [] -for success, item in helpers.streaming_bulk(client, actions=_generate_data()): - if success: - 
succeeded.append(item) - else: - failed.append(item) - -if len(failed) > 0: - print(f"There were {len(failed)} errors:") - for item in failed: - print(item["index"]["error"]) - -if len(succeeded) > 0: - print(f"Bulk-inserted {len(succeeded)} items (streaming_bulk).") - -# delete index -client.indices.delete(index=index_name) diff --git a/samples/bulk/bulk-ld.py b/samples/bulk/bulk-ld.py deleted file mode 100755 index fff0ae98..00000000 --- a/samples/bulk/bulk-ld.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - - -import json -import os - -from opensearchpy import OpenSearch - -# connect to an instance of OpenSearch - -host = os.getenv("HOST", default="localhost") -port = int(os.getenv("PORT", 9200)) -auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) - -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -# check whether an index exists -index_name = "my-index" - -if not client.indices.exists(index_name): - client.indices.create( - index_name, - body={ - "mappings": { - "properties": { - "value": {"type": "float"}, - } - } - }, - ) - -# index data -data = "" -for i in range(100): - data += json.dumps({"index": {"_index": index_name, "_id": i}}) + "\n" - data += json.dumps({"value": i}) + "\n" - -rc = client.bulk(data) -if rc["errors"]: - print("There were errors:") - for item in rc["items"]: - print(f"{item['index']['status']}: {item['index']['error']['type']}") -else: - print(f"Bulk-inserted {len(rc['items'])} items.") - -# delete index -client.indices.delete(index=index_name) diff --git a/samples/bulk/bulk_array.py 
b/samples/bulk/bulk_array.py new file mode 100755 index 00000000..e8ea6a09 --- /dev/null +++ b/samples/bulk/bulk_array.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + +import os +from typing import Any + +from opensearchpy import OpenSearch + + +def main() -> None: + # connect to an instance of OpenSearch + + host = os.getenv("HOST", default="localhost") + port = int(os.getenv("PORT", 9200)) + auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + # check whether an index exists + index_name = "my-index" + + if not client.indices.exists(index_name): + client.indices.create( + index_name, + body={ + "mappings": { + "properties": { + "value": {"type": "float"}, + } + } + }, + ) + + # index data + data: Any = [] + for i in range(100): + data.append({"index": {"_index": index_name, "_id": i}}) + data.append({"value": i}) + + rc = client.bulk(data) + if rc["errors"]: + print("There were errors:") + for item in rc["items"]: + print(f"{item['index']['status']}: {item['index']['error']['type']}") + else: + print(f"Bulk-inserted {len(rc['items'])} items.") + + # delete index + client.indices.delete(index=index_name) + + +if __name__ == "__main__": + main() diff --git a/samples/bulk/bulk_helpers.py b/samples/bulk/bulk_helpers.py new file mode 100755 index 00000000..0468b0f5 --- /dev/null +++ b/samples/bulk/bulk_helpers.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file 
be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + +import os +from typing import Any + +from opensearchpy import OpenSearch, helpers + + +def main() -> None: + # connect to an instance of OpenSearch + + host = os.getenv("HOST", default="localhost") + port = int(os.getenv("PORT", 9200)) + auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + # check whether an index exists + index_name = "my-index" + + if not client.indices.exists(index_name): + client.indices.create( + index_name, + body={ + "mappings": { + "properties": { + "value": {"type": "float"}, + } + } + }, + ) + + # index data + data = [] + for i in range(100): + data.append({"_index": index_name, "_id": i, "value": i}) + + # serialized bulk raising an exception on error + rc = helpers.bulk(client, data) + print(f"Bulk-inserted {rc[0]} items (bulk).") + + # parallel bulk with explicit error checking + succeeded = [] + failed = [] + for success, item in helpers.parallel_bulk( + client, + actions=data, + chunk_size=10, + raise_on_error=False, + raise_on_exception=False, + max_chunk_bytes=20 * 1024 * 1024, + request_timeout=60, + ): + if success: + succeeded.append(item) + else: + failed.append(item) + + if len(failed) > 0: + print(f"There were {len(failed)} errors:") + for item in failed: + print(item["index"]["error"]) + + if len(succeeded) > 0: + print(f"Bulk-inserted {len(succeeded)} items (parallel_bulk).") + + # streaming bulk with a data generator + def _generate_data() -> Any: + for i in range(100): + yield {"_index": index_name, "_id": i, "value": i} + + succeeded = [] + failed = [] + for success, item in helpers.streaming_bulk(client, actions=_generate_data()): + if success: + succeeded.append(item) + else: + 
failed.append(item) + + if len(failed) > 0: + print(f"There were {len(failed)} errors:") + for item in failed: + print(item["index"]["error"]) + + if len(succeeded) > 0: + print(f"Bulk-inserted {len(succeeded)} items (streaming_bulk).") + + # delete index + client.indices.delete(index=index_name) + + +if __name__ == "__main__": + main() diff --git a/samples/bulk/bulk_ld.py b/samples/bulk/bulk_ld.py new file mode 100755 index 00000000..0bf556fa --- /dev/null +++ b/samples/bulk/bulk_ld.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + +import json +import os + +from opensearchpy import OpenSearch + + +def main() -> None: + # connect to an instance of OpenSearch + + host = os.getenv("HOST", default="localhost") + port = int(os.getenv("PORT", 9200)) + auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + # check whether an index exists + index_name = "my-index" + + if not client.indices.exists(index_name): + client.indices.create( + index_name, + body={ + "mappings": { + "properties": { + "value": {"type": "float"}, + } + } + }, + ) + + # index data + data = "" + for i in range(100): + data += json.dumps({"index": {"_index": index_name, "_id": i}}) + "\n" + data += json.dumps({"value": i}) + "\n" + + rc = client.bulk(data) + if rc["errors"]: + print("There were errors:") + for item in rc["items"]: + print(f"{item['index']['status']}: {item['index']['error']['type']}") + else: + print(f"Bulk-inserted {len(rc['items'])} items.") + + # delete index + client.indices.delete(index=index_name) + + +if 
__name__ == "__main__": + main() diff --git a/samples/document_lifecycle/document_lifecycle_sample.py b/samples/document_lifecycle/document_lifecycle_sample.py index 1d338da7..c21ae44a 100644 --- a/samples/document_lifecycle/document_lifecycle_sample.py +++ b/samples/document_lifecycle/document_lifecycle_sample.py @@ -17,78 +17,89 @@ # urllib3.disable_warnings() -# Connect to OpenSearch -client = OpenSearch( - hosts=["https://localhost:9200"], - use_ssl=True, - verify_certs=False, - http_auth=("admin", "admin"), -) - -# Create an index -index = "movies" -if not client.indices.exists(index=index): - client.indices.create(index=index) - -# Create documents -client.index(index=index, id=1, body={"title": "Beauty and the Beast", "year": 1991}) -client.index( - index=index, - id=2, - body={"title": "Beauty and the Beast - Live Action", "year": 2017}, -) - -# Index a document -client.index(index=index, id=2, body={"title": "The Lion King", "year": 1994}) - -# Create a document with auto-generated ID -result = client.index(index=index, body={"title": "The Lion King 2", "year": 1998}) -print(result) - -# Get a document -result = client.get(index=index, id=1)["_source"] -print(result) - -# Get a document with _source includes -result = client.get(index=index, id=1, _source_includes=["title"])["_source"] -print(result) - -# Get a document with _source excludes -result = client.get(index=index, id=1, _source_excludes=["title"])["_source"] -print(result) - -# Get multiple documents -result = client.mget(index=index, body={"docs": [{"_id": 1}, {"_id": 2}]})["docs"] -print(result) - -# Check if a document exists -result = client.exists(index=index, id=1) -print(result) - -# Update a document -client.update(index=index, id=1, body={"doc": {"year": 1995}}) - -# Update a document using script -client.update(index=index, id=1, body={"script": {"source": "ctx._source.year += 5"}}) - -# Update multiple documents by query -client.update_by_query( - index=index, - body={ - "script": 
{"source": "ctx._source.year -= 1"}, - "query": {"range": {"year": {"gt": 2023}}}, - }, -) - -# Delete a document -client.delete(index=index, id=1) - -# Delete a document with ignore 404 -client.delete(index=index, id=1, ignore=404) - -# Delete multiple documents by query -client.delete_by_query(index=index, body={"query": {"range": {"year": {"gt": 2023}}}}) - -# Delete the index -client.indices.delete(index=index) -print("Deleted index!") +def main() -> None: + # Connect to OpenSearch + client = OpenSearch( + hosts=["https://localhost:9200"], + use_ssl=True, + verify_certs=False, + http_auth=("admin", "admin"), + ) + + # Create an index + index = "movies" + if not client.indices.exists(index=index): + client.indices.create(index=index) + + # Create documents + client.index( + index=index, id=1, body={"title": "Beauty and the Beast", "year": 1991} + ) + client.index( + index=index, + id=2, + body={"title": "Beauty and the Beast - Live Action", "year": 2017}, + ) + + # Index a document + client.index(index=index, id=2, body={"title": "The Lion King", "year": 1994}) + + # Create a document with auto-generated ID + result = client.index(index=index, body={"title": "The Lion King 2", "year": 1998}) + print(result) + + # Get a document + result = client.get(index=index, id=1)["_source"] + print(result) + + # Get a document with _source includes + result = client.get(index=index, id=1, _source_includes=["title"])["_source"] + print(result) + + # Get a document with _source excludes + result = client.get(index=index, id=1, _source_excludes=["title"])["_source"] + print(result) + + # Get multiple documents + result = client.mget(index=index, body={"docs": [{"_id": 1}, {"_id": 2}]})["docs"] + print(result) + + # Check if a document exists + result = client.exists(index=index, id=1) + print(result) + + # Update a document + client.update(index=index, id=1, body={"doc": {"year": 1995}}) + + # Update a document using script + client.update( + index=index, id=1, body={"script": 
{"source": "ctx._source.year += 5"}} + ) + + # Update multiple documents by query + client.update_by_query( + index=index, + body={ + "script": {"source": "ctx._source.year -= 1"}, + "query": {"range": {"year": {"gt": 2023}}}, + }, + ) + + # Delete a document + client.delete(index=index, id=1) + + # Delete a document with ignore 404 + client.delete(index=index, id=1, ignore=404) + + # Delete multiple documents by query + client.delete_by_query( + index=index, body={"query": {"range": {"year": {"gt": 2023}}}} + ) + + # Delete the index + client.indices.delete(index=index) + print("Deleted index!") + + +if __name__ == "__main__": + main() diff --git a/samples/hello/hello.py b/samples/hello/hello.py index 0b589c9d..a614f085 100755 --- a/samples/hello/hello.py +++ b/samples/hello/hello.py @@ -15,62 +15,68 @@ # connect to OpenSearch -host = "localhost" -port = 9200 -auth = ("admin", "admin") # For testing only. Don't store credentials in code. -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) +def main() -> None: + host = "localhost" + port = 9200 + auth = ("admin", "admin") # For testing only. Don't store credentials in code. 
-info = client.info() -print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) -# create an index + info = client.info() + print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") -index_name = "test-index" + # create an index -index_body = {"settings": {"index": {"number_of_shards": 4}}} + index_name = "test-index" -response = client.indices.create(index_name, body=index_body) + index_body = {"settings": {"index": {"number_of_shards": 4}}} -print(response) + response = client.indices.create(index_name, body=index_body) -# add a document to the index + print(response) -document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} + # add a document to the index -id = "1" + document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} -response = client.index(index=index_name, body=document, id=id, refresh=True) + id = "1" -print(response) + response = client.index(index=index_name, body=document, id=id, refresh=True) -# search for a document + print(response) -q = "miller" + # search for a document -query = { - "size": 5, - "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, -} + q = "miller" -response = client.search(body=query, index=index_name) + query = { + "size": 5, + "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, + } -print(response) + response = client.search(body=query, index=index_name) -# delete the document + print(response) -response = client.delete(index=index_name, id=id) + # delete the document -print(response) + response = client.delete(index=index_name, id=id) -# delete the index + print(response) -response = client.indices.delete(index=index_name) + # delete the index -print(response) + response = client.indices.delete(index=index_name) + + print(response) + + +if 
__name__ == "__main__": + main() diff --git a/samples/hello/hello-async.py b/samples/hello/hello_async.py similarity index 100% rename from samples/hello/hello-async.py rename to samples/hello/hello_async.py diff --git a/samples/index_template/index_template_sample.py b/samples/index_template/index_template_sample.py index 4fe580ac..ca0f8310 100644 --- a/samples/index_template/index_template_sample.py +++ b/samples/index_template/index_template_sample.py @@ -11,119 +11,127 @@ # GitHub history for details. from opensearchpy import OpenSearch -# Create a client instance -client = OpenSearch( - hosts=["https://localhost:9200"], - use_ssl=True, - verify_certs=False, - http_auth=("admin", "admin"), -) - -# You can create an index template to define default settings and mappings for indices of certain patterns. The following example creates an index template named `books` with default settings and mappings for indices of the `books-*` pattern: -client.indices.put_index_template( - name="books", - body={ - "index_patterns": ["books-*"], - "priority": 1, - "template": { - "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}}, - "mappings": { - "properties": { - "title": {"type": "text"}, - "author": {"type": "text"}, - "published_on": {"type": "date"}, - "pages": {"type": "integer"}, - } + +def main() -> None: + # Create a client instance + client = OpenSearch( + hosts=["https://localhost:9200"], + use_ssl=True, + verify_certs=False, + http_auth=("admin", "admin"), + ) + + # You can create an index template to define default settings and mappings for indices of certain patterns. 
+ # The following example creates an index template named `books` with default settings and mappings for indices of the `books-*` pattern: + client.indices.put_index_template( + name="books", + body={ + "index_patterns": ["books-*"], + "priority": 1, + "template": { + "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}}, + "mappings": { + "properties": { + "title": {"type": "text"}, + "author": {"type": "text"}, + "published_on": {"type": "date"}, + "pages": {"type": "integer"}, + } + }, }, }, - }, -) - -# Now, when you create an index that matches the `books-*` pattern, OpenSearch will automatically apply the template's settings and mappings to the index. Let's create an index named books-nonfiction and verify that its settings and mappings match those of the template: -client.indices.create(index="books-nonfiction") -print(client.indices.get(index="books-nonfiction")) - -# If multiple index templates match the index's name, OpenSearch will apply the template with the highest `priority`. The following example creates two index templates named `books-*` and `books-fiction-*` with different settings: -client.indices.put_index_template( - name="books", - body={ - "index_patterns": ["books-*"], - "priority": 1, - "template": { - "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}} + ) + + # Now, when you create an index that matches the `books-*` pattern, OpenSearch will automatically apply the template's settings and mappings to the index. + # Let's create an index named books-nonfiction and verify that its settings and mappings match those of the template: + client.indices.create(index="books-nonfiction") + print(client.indices.get(index="books-nonfiction")) + + # If multiple index templates match the index's name, OpenSearch will apply the template with the highest `priority`. 
+ # The following example creates two index templates named `books-*` and `books-fiction-*` with different settings: + client.indices.put_index_template( + name="books", + body={ + "index_patterns": ["books-*"], + "priority": 1, + "template": { + "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}} + }, }, - }, -) - -client.indices.put_index_template( - name="books-fiction", - body={ - "index_patterns": ["books-fiction-*"], - "priority": 2, - "template": { - "settings": {"index": {"number_of_shards": 1, "number_of_replicas": 1}} + ) + + client.indices.put_index_template( + name="books-fiction", + body={ + "index_patterns": ["books-fiction-*"], + "priority": 2, + "template": { + "settings": {"index": {"number_of_shards": 1, "number_of_replicas": 1}} + }, }, - }, -) - -# # Test multiple index templates -client.indices.create(index="books-fiction-romance") -print(client.indices.get(index="books-fiction-romance")) - - -# Composable index templates are a new type of index template that allow you to define multiple component templates and compose them into a final template. The following example creates a component template named `books_mappings` with default mappings for indices of the `books-*` and `books-fiction-*` patterns: -client.cluster.put_component_template( - name="books_mappings", - body={ - "template": { - "mappings": { - "properties": { - "title": {"type": "text"}, - "author": {"type": "text"}, - "published_on": {"type": "date"}, - "pages": {"type": "integer"}, + ) + + # # Test multiple index templates + client.indices.create(index="books-fiction-romance") + print(client.indices.get(index="books-fiction-romance")) + + # Composable index templates are a new type of index template that allow you to define multiple component templates and compose them into a final template. 
+ # The following example creates a component template named `books_mappings` with default mappings for indices of the `books-*` and `books-fiction-*` patterns: + client.cluster.put_component_template( + name="books_mappings", + body={ + "template": { + "mappings": { + "properties": { + "title": {"type": "text"}, + "author": {"type": "text"}, + "published_on": {"type": "date"}, + "pages": {"type": "integer"}, + } } } - } - }, -) - -client.indices.put_index_template( - name="books", - body={ - "index_patterns": ["books-*"], - "composed_of": ["books_mappings"], - "priority": 4, - "template": { - "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}} }, - }, -) - -client.indices.put_index_template( - name="books-fiction", - body={ - "index_patterns": ["books-fiction-*"], - "composed_of": ["books_mappings"], - "priority": 5, - "template": { - "settings": {"index": {"number_of_shards": 1, "number_of_replicas": 1}} + ) + + client.indices.put_index_template( + name="books", + body={ + "index_patterns": ["books-*"], + "composed_of": ["books_mappings"], + "priority": 4, + "template": { + "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}} + }, + }, + ) + + client.indices.put_index_template( + name="books-fiction", + body={ + "index_patterns": ["books-fiction-*"], + "composed_of": ["books_mappings"], + "priority": 5, + "template": { + "settings": {"index": {"number_of_shards": 1, "number_of_replicas": 1}} + }, }, - }, -) + ) + + # Test composable index templates + client.indices.create(index="books-fiction-horror") + print(client.indices.get(index="books-fiction-horror")) + # Get an index template + print(client.indices.get_index_template(name="books")) -# Test composable index templates -client.indices.create(index="books-fiction-horror") -print(client.indices.get(index="books-fiction-horror")) + # Delete an index template + client.indices.delete_index_template(name="books") -# Get an index template 
-print(client.indices.get_index_template(name="books")) + # Cleanup + client.indices.delete(index="books-*") + client.indices.delete_index_template(name="books-fiction") + client.cluster.delete_component_template(name="books_mappings") -# Delete an index template -client.indices.delete_index_template(name="books") -# Cleanup -client.indices.delete(index="books-*") -client.indices.delete_index_template(name="books-fiction") -client.cluster.delete_component_template(name="books_mappings") +if __name__ == "__main__": + main() diff --git a/samples/json/json-hello.py b/samples/json/json-hello.py deleted file mode 100755 index 5b39e41b..00000000 --- a/samples/json/json-hello.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - - -from opensearchpy import OpenSearch - -# connect to OpenSearch - -host = "localhost" -port = 9200 -auth = ("admin", "admin") # For testing only. Don't store credentials in code. 
- -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -info = client.http.get("/") -print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") - -# create an index - -index_name = "movies" - -index_body = {"settings": {"index": {"number_of_shards": 4}}} - -print(client.http.put(f"/{index_name}", body=index_body)) - -# add a document to the index - -document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} - -id = "1" - -print(client.http.put(f"/{index_name}/_doc/{id}?refresh=true", body=document)) - -# search for a document - -q = "miller" - -query = { - "size": 5, - "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, -} - -print(client.http.post(f"/{index_name}/_search", body=query)) - -# delete the document - -print(client.http.delete(f"/{index_name}/_doc/{id}")) - -# delete the index - -print(client.http.delete(f"/{index_name}")) diff --git a/samples/json/json_hello.py b/samples/json/json_hello.py new file mode 100755 index 00000000..4b2e10e2 --- /dev/null +++ b/samples/json/json_hello.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + +from opensearchpy import OpenSearch + + +def main() -> None: + # connect to OpenSearch + + host = "localhost" + port = 9200 + auth = ("admin", "admin") # For testing only. Don't store credentials in code. 
+ + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + info = client.http.get("/") + print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") + + # create an index + + index_name = "movies" + + index_body = {"settings": {"index": {"number_of_shards": 4}}} + + print(client.http.put(f"/{index_name}", body=index_body)) + + # add a document to the index + + document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} + + id = "1" + + print(client.http.put(f"/{index_name}/_doc/{id}?refresh=true", body=document)) + + # search for a document + + q = "miller" + + query = { + "size": 5, + "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, + } + + print(client.http.post(f"/{index_name}/_search", body=query)) + + # delete the document + + print(client.http.delete(f"/{index_name}/_doc/{id}")) + + # delete the index + + print(client.http.delete(f"/{index_name}")) + + +if __name__ == "__main__": + main() diff --git a/samples/json/json-hello-async.py b/samples/json/json_hello_async.py similarity index 100% rename from samples/json/json-hello-async.py rename to samples/json/json_hello_async.py diff --git a/samples/knn/knn-basics.py b/samples/knn/knn-basics.py deleted file mode 100755 index 96efb028..00000000 --- a/samples/knn/knn-basics.py +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. 
- - -import os -import random - -from opensearchpy import OpenSearch, helpers - -# connect to an instance of OpenSearch - -host = os.getenv("HOST", default="localhost") -port = int(os.getenv("PORT", 9200)) -auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) - -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -# check whether an index exists -index_name = "my-index" -dimensions = 5 - -if not client.indices.exists(index_name): - client.indices.create( - index_name, - body={ - "settings": {"index.knn": True}, - "mappings": { - "properties": { - "values": {"type": "knn_vector", "dimension": dimensions}, - } - }, - }, - ) - -# index data -vectors = [] -for i in range(10): - vec = [] - for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) - - vectors.append( - { - "_index": index_name, - "_id": i, - "values": vec, - } - ) - -# bulk index -helpers.bulk(client, vectors) - -client.indices.refresh(index=index_name) - -# search -vec = [] -for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) -print(f"Searching for {vec} ...") - -search_query = {"query": {"knn": {"values": {"vector": vec, "k": 3}}}} -results = client.search(index=index_name, body=search_query) -for hit in results["hits"]["hits"]: - print(hit) - -# delete index -client.indices.delete(index=index_name) diff --git a/samples/knn/knn-boolean-filter.py b/samples/knn/knn-boolean-filter.py deleted file mode 100755 index 5ae7704c..00000000 --- a/samples/knn/knn-boolean-filter.py +++ /dev/null @@ -1,92 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. 
- - -import os -import random - -from opensearchpy import OpenSearch, helpers - -# connect to an instance of OpenSearch - -host = os.getenv("HOST", default="localhost") -port = int(os.getenv("PORT", 9200)) -auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) - -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -# check whether an index exists -index_name = "my-index" -dimensions = 5 - -if not client.indices.exists(index_name): - client.indices.create( - index_name, - body={ - "settings": {"index.knn": True}, - "mappings": { - "properties": { - "values": {"type": "knn_vector", "dimension": dimensions}, - } - }, - }, - ) - -# index data -vectors = [] -genres = ["fiction", "drama", "romance"] -for i in range(3000): - vec = [] - for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) - - vectors.append( - { - "_index": index_name, - "_id": i, - "values": vec, - "metadata": {"genre": random.choice(genres)}, - } - ) - -# bulk index -helpers.bulk(client, vectors) - -client.indices.refresh(index=index_name) - -# search -genre = random.choice(genres) -vec = [] -for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) -print(f"Searching for {vec} with the '{genre}' genre ...") - -search_query = { - "query": { - "bool": { - "filter": {"bool": {"must": [{"term": {"metadata.genre": genre}}]}}, - "must": {"knn": {"values": {"vector": vec, "k": 5}}}, - } - } -} -results = client.search(index=index_name, body=search_query) -for hit in results["hits"]["hits"]: - print(hit) - -# delete index -client.indices.delete(index=index_name) diff --git a/samples/knn/knn-efficient-filter.py b/samples/knn/knn-efficient-filter.py deleted file mode 100755 index cbfd41ad..00000000 --- a/samples/knn/knn-efficient-filter.py +++ /dev/null @@ -1,180 +0,0 @@ -#!/usr/bin/env python - -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The 
OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - - -import os - -from opensearchpy import OpenSearch, helpers - -# connect to an instance of OpenSearch - -host = os.getenv("HOST", default="localhost") -port = int(os.getenv("PORT", 9200)) -auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) - -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -# check whether an index exists -index_name = "hotels-index" - -if not client.indices.exists(index_name): - client.indices.create( - index_name, - body={ - "settings": { - "index.knn": True, - "knn.algo_param.ef_search": 100, - "number_of_shards": 1, - "number_of_replicas": 0, - }, - "mappings": { - "properties": { - "location": { - "type": "knn_vector", - "dimension": 2, - "method": { - "name": "hnsw", - "space_type": "l2", - "engine": "lucene", - "parameters": {"ef_construction": 100, "m": 16}, - }, - }, - } - }, - }, - ) - -# index data -vectors = [ - { - "_index": "hotels-index", - "_id": "1", - "location": [5.2, 4.4], - "parking": "true", - "rating": 5, - }, - { - "_index": "hotels-index", - "_id": "2", - "location": [5.2, 3.9], - "parking": "false", - "rating": 4, - }, - { - "_index": "hotels-index", - "_id": "3", - "location": [4.9, 3.4], - "parking": "true", - "rating": 9, - }, - { - "_index": "hotels-index", - "_id": "4", - "location": [4.2, 4.6], - "parking": "false", - "rating": 6, - }, - { - "_index": "hotels-index", - "_id": "5", - "location": [3.3, 4.5], - "parking": "true", - "rating": 8, - }, - { - "_index": "hotels-index", - "_id": "6", - "location": [6.4, 3.4], - "parking": "true", - "rating": 9, - }, - { - "_index": "hotels-index", - "_id": "7", - "location": [4.2, 6.2], - "parking": "true", - "rating": 5, - 
}, - { - "_index": "hotels-index", - "_id": "8", - "location": [2.4, 4.0], - "parking": "true", - "rating": 8, - }, - { - "_index": "hotels-index", - "_id": "9", - "location": [1.4, 3.2], - "parking": "false", - "rating": 5, - }, - { - "_index": "hotels-index", - "_id": "10", - "location": [7.0, 9.9], - "parking": "true", - "rating": 9, - }, - { - "_index": "hotels-index", - "_id": "11", - "location": [3.0, 2.3], - "parking": "false", - "rating": 6, - }, - { - "_index": "hotels-index", - "_id": "12", - "location": [5.0, 1.0], - "parking": "true", - "rating": 3, - }, -] - -helpers.bulk(client, vectors) - -client.indices.refresh(index=index_name) - -# search -search_query = { - "size": 3, - "query": { - "knn": { - "location": { - "vector": [5, 4], - "k": 3, - "filter": { - "bool": { - "must": [ - {"range": {"rating": {"gte": 8, "lte": 10}}}, - {"term": {"parking": "true"}}, - ] - } - }, - } - } - }, -} - -results = client.search(index=index_name, body=search_query) -for hit in results["hits"]["hits"]: - print(hit) - -# delete index -client.indices.delete(index=index_name) diff --git a/samples/knn/knn-async-basics.py b/samples/knn/knn_async_basics.py similarity index 100% rename from samples/knn/knn-async-basics.py rename to samples/knn/knn_async_basics.py diff --git a/samples/knn/knn_basics.py b/samples/knn/knn_basics.py new file mode 100755 index 00000000..c74344b2 --- /dev/null +++ b/samples/knn/knn_basics.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
+ + +import os +import random + +from opensearchpy import OpenSearch, helpers + + +def main() -> None: + # connect to an instance of OpenSearch + + host = os.getenv("HOST", default="localhost") + port = int(os.getenv("PORT", 9200)) + auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + # check whether an index exists + index_name = "my-index" + dimensions = 5 + + if not client.indices.exists(index_name): + client.indices.create( + index_name, + body={ + "settings": {"index.knn": True}, + "mappings": { + "properties": { + "values": {"type": "knn_vector", "dimension": dimensions}, + } + }, + }, + ) + + # index data + vectors = [] + for i in range(10): + vec = [] + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) + + vectors.append( + { + "_index": index_name, + "_id": i, + "values": vec, + } + ) + + # bulk index + helpers.bulk(client, vectors) + + client.indices.refresh(index=index_name) + + # search + vec = [] + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) + print(f"Searching for {vec} ...") + + search_query = {"query": {"knn": {"values": {"vector": vec, "k": 3}}}} + results = client.search(index=index_name, body=search_query) + for hit in results["hits"]["hits"]: + print(hit) + + # delete index + client.indices.delete(index=index_name) + + +if __name__ == "__main__": + main() diff --git a/samples/knn/knn_boolean_filter.py b/samples/knn/knn_boolean_filter.py new file mode 100755 index 00000000..710216f2 --- /dev/null +++ b/samples/knn/knn_boolean_filter.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. 
+# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + +import os +import random + +from opensearchpy import OpenSearch, helpers + + +def main() -> None: + # connect to an instance of OpenSearch + + host = os.getenv("HOST", default="localhost") + port = int(os.getenv("PORT", 9200)) + auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + # check whether an index exists + index_name = "my-index" + dimensions = 5 + + if not client.indices.exists(index_name): + client.indices.create( + index_name, + body={ + "settings": {"index.knn": True}, + "mappings": { + "properties": { + "values": {"type": "knn_vector", "dimension": dimensions}, + } + }, + }, + ) + + # index data + vectors = [] + genres = ["fiction", "drama", "romance"] + for i in range(3000): + vec = [] + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) + + vectors.append( + { + "_index": index_name, + "_id": i, + "values": vec, + "metadata": {"genre": random.choice(genres)}, + } + ) + + # bulk index + helpers.bulk(client, vectors) + + client.indices.refresh(index=index_name) + + # search + genre = random.choice(genres) + vec = [] + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) + print(f"Searching for {vec} with the '{genre}' genre ...") + + search_query = { + "query": { + "bool": { + "filter": {"bool": {"must": [{"term": {"metadata.genre": genre}}]}}, + "must": {"knn": {"values": {"vector": vec, "k": 5}}}, + } + } + } + results = client.search(index=index_name, body=search_query) + for hit in results["hits"]["hits"]: + print(hit) + + # delete index + client.indices.delete(index=index_name) + + +if __name__ == "__main__": + main() diff --git a/samples/knn/knn_efficient_filter.py b/samples/knn/knn_efficient_filter.py new file mode 100755 index 
00000000..dfe1308f --- /dev/null +++ b/samples/knn/knn_efficient_filter.py @@ -0,0 +1,186 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + +import os + +from opensearchpy import OpenSearch, helpers + + +def main() -> None: + # connect to an instance of OpenSearch + + host = os.getenv("HOST", default="localhost") + port = int(os.getenv("PORT", 9200)) + auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + # check whether an index exists + index_name = "hotels-index" + + if not client.indices.exists(index_name): + client.indices.create( + index_name, + body={ + "settings": { + "index.knn": True, + "knn.algo_param.ef_search": 100, + "number_of_shards": 1, + "number_of_replicas": 0, + }, + "mappings": { + "properties": { + "location": { + "type": "knn_vector", + "dimension": 2, + "method": { + "name": "hnsw", + "space_type": "l2", + "engine": "lucene", + "parameters": {"ef_construction": 100, "m": 16}, + }, + }, + } + }, + }, + ) + + # index data + vectors = [ + { + "_index": "hotels-index", + "_id": "1", + "location": [5.2, 4.4], + "parking": "true", + "rating": 5, + }, + { + "_index": "hotels-index", + "_id": "2", + "location": [5.2, 3.9], + "parking": "false", + "rating": 4, + }, + { + "_index": "hotels-index", + "_id": "3", + "location": [4.9, 3.4], + "parking": "true", + "rating": 9, + }, + { + "_index": "hotels-index", + "_id": "4", + "location": [4.2, 4.6], + "parking": "false", + "rating": 6, + }, + { + "_index": "hotels-index", + "_id": "5", + "location": [3.3, 4.5], + "parking": "true", + "rating": 8, + 
}, + { + "_index": "hotels-index", + "_id": "6", + "location": [6.4, 3.4], + "parking": "true", + "rating": 9, + }, + { + "_index": "hotels-index", + "_id": "7", + "location": [4.2, 6.2], + "parking": "true", + "rating": 5, + }, + { + "_index": "hotels-index", + "_id": "8", + "location": [2.4, 4.0], + "parking": "true", + "rating": 8, + }, + { + "_index": "hotels-index", + "_id": "9", + "location": [1.4, 3.2], + "parking": "false", + "rating": 5, + }, + { + "_index": "hotels-index", + "_id": "10", + "location": [7.0, 9.9], + "parking": "true", + "rating": 9, + }, + { + "_index": "hotels-index", + "_id": "11", + "location": [3.0, 2.3], + "parking": "false", + "rating": 6, + }, + { + "_index": "hotels-index", + "_id": "12", + "location": [5.0, 1.0], + "parking": "true", + "rating": 3, + }, + ] + + helpers.bulk(client, vectors) + + client.indices.refresh(index=index_name) + + # search + search_query = { + "size": 3, + "query": { + "knn": { + "location": { + "vector": [5, 4], + "k": 3, + "filter": { + "bool": { + "must": [ + {"range": {"rating": {"gte": 8, "lte": 10}}}, + {"term": {"parking": "true"}}, + ] + } + }, + } + } + }, + } + + results = client.search(index=index_name, body=search_query) + for hit in results["hits"]["hits"]: + print(hit) + + # delete index + client.indices.delete(index=index_name) + + +if __name__ == "__main__": + main() diff --git a/samples/security/roles.py b/samples/security/roles.py index 8a2d1ef5..37558042 100644 --- a/samples/security/roles.py +++ b/samples/security/roles.py @@ -15,43 +15,49 @@ from opensearchpy import OpenSearch -# connect to OpenSearch - -host = "localhost" -port = 9200 -auth = ("admin", "admin") # For testing only. Don't store credentials in code. 
- -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) - -# Create a Role - -role_name = "test-role" - -role_content = { - "cluster_permissions": ["cluster_monitor"], - "index_permissions": [ - { - "index_patterns": ["index", "test-*"], - "allowed_actions": [ - "data_access", - "indices_monitor", - ], - } - ], -} - -response = client.security.create_role(role_name, body=role_content) -print(response) - -# Get a Role - -role_name = "test-role" - -response = client.security.get_role(role_name) -print(response) + +def main() -> None: + # connect to OpenSearch + + host = "localhost" + port = 9200 + auth = ("admin", "admin") # For testing only. Don't store credentials in code. + + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + # Create a Role + + role_name = "test-role" + + role_content = { + "cluster_permissions": ["cluster_monitor"], + "index_permissions": [ + { + "index_patterns": ["index", "test-*"], + "allowed_actions": [ + "data_access", + "indices_monitor", + ], + } + ], + } + + response = client.security.create_role(role_name, body=role_content) + print(response) + + # Get a Role + + role_name = "test-role" + + response = client.security.get_role(role_name) + print(response) + + +if __name__ == "__main__": + main() diff --git a/samples/security/users.py b/samples/security/users.py index 0a778b8d..3e1e90f5 100644 --- a/samples/security/users.py +++ b/samples/security/users.py @@ -15,31 +15,37 @@ from opensearchpy import OpenSearch -# connect to OpenSearch -host = "localhost" -port = 9200 -auth = ("admin", "admin") # For testing only. Don't store credentials in code. 
+def main() -> None: + # connect to OpenSearch -client = OpenSearch( - hosts=[{"host": host, "port": port}], - http_auth=auth, - use_ssl=True, - verify_certs=False, - ssl_show_warn=False, -) + host = "localhost" + port = 9200 + auth = ("admin", "admin") # For testing only. Don't store credentials in code. -# Create a User + client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) -user_name = "test-user" -user_content = {"password": "opensearch@123", "opendistro_security_roles": []} + # Create a User -response = client.security.create_user(user_name, body=user_content) -print(response) + user_name = "test-user" + user_content = {"password": "opensearch@123", "opendistro_security_roles": []} -# Get a User + response = client.security.create_user(user_name, body=user_content) + print(response) -user_name = "test-user" + # Get a User -response = client.security.get_user(user_name) -print(response) + user_name = "test-user" + + response = client.security.get_user(user_name) + print(response) + + +if __name__ == "__main__": + main() diff --git a/setup.cfg b/setup.cfg index 0482faa6..3d5fd003 100644 --- a/setup.cfg +++ b/setup.cfg @@ -8,7 +8,7 @@ requires = python python-urllib3 ignore = E203, E266, E501, W503 max-line-length = 240 -[pytest] +[tool:pytest] junit_family=legacy asyncio_mode=auto @@ -24,7 +24,8 @@ ignore_missing_imports=True [pylint] max-line-length = 240 +good-names-rgxs = ^[_a-z][_a-z0-9]?$ # allow for 1-character variable names [pylint.MESSAGE CONTROL] disable = all -enable = line-too-long +enable = line-too-long, invalid-name diff --git a/setup.py b/setup.py index 6a0a5d13..6ad7254b 100644 --- a/setup.py +++ b/setup.py @@ -31,26 +31,26 @@ from setuptools import find_packages, setup -package_name = "opensearch-py" -package_version = "" -base_dir = abspath(dirname(__file__)) +PACKAGE_NAME = "opensearch-py" +PACKAGE_VERSION = "" +BASE_DIR = abspath(dirname(__file__)) 
-with open(join(base_dir, package_name.replace("-", ""), "_version.py")) as f: +with open(join(BASE_DIR, PACKAGE_NAME.replace("-", ""), "_version.py")) as f: data = f.read() m = re.search(r"^__versionstr__: str\s+=\s+[\"\']([^\"\']+)[\"\']", data, re.M) if m: - package_version = m.group(1) + PACKAGE_VERSION = m.group(1) else: raise Exception(f"Invalid version: {data}") -with open(join(base_dir, "README.md")) as f: +with open(join(BASE_DIR, "README.md")) as f: long_description = f.read().strip() -module_dir = package_name.replace("-", "") +MODULE_DIR = PACKAGE_NAME.replace("-", "") packages = [ package for package in find_packages(where=".", exclude=("test_opensearchpy*",)) - if package == module_dir or package.startswith(module_dir + ".") + if package == MODULE_DIR or package.startswith(MODULE_DIR + ".") ] install_requires = [ "urllib3>=1.26.18", @@ -77,13 +77,13 @@ generate_require = ["black", "jinja2"] setup( - name=package_name, + name=PACKAGE_NAME, description="Python client for OpenSearch", license="Apache-2.0", url="https://github.com/opensearch-project/opensearch-py", long_description=long_description, long_description_content_type="text/markdown", - version=package_version, + version=PACKAGE_VERSION, author="Aleksei Atavin, Denis Zalevskiy, Rushi Agrawal, Shephali Mittal", author_email="axeo@aiven.io, dez@aiven.io, rushi.agr@gmail.com, shephalm@amazon.com", maintainer="Aleksei Atavin, Denis Zalevskiy, Rushi Agrawal, Shephali Mittal", diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py index 743add7b..c9c0dc17 100644 --- a/test_opensearchpy/test_async/test_connection.py +++ b/test_opensearchpy/test_async/test_connection.py @@ -45,7 +45,7 @@ from opensearchpy.compat import reraise_exceptions from opensearchpy.connection import Connection, async_connections from opensearchpy.exceptions import ConnectionError, NotFoundError, TransportError -from test_opensearchpy.TestHttpServer import TestHTTPServer 
+from test_opensearchpy.test_http_server import TestHTTPServer pytestmark: MarkDecorator = pytest.mark.asyncio diff --git a/test_opensearchpy/test_async/test_helpers/test_document.py b/test_opensearchpy/test_async/test_helpers/test_document.py index d6ef0128..ff88fd6d 100644 --- a/test_opensearchpy/test_async/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_helpers/test_document.py @@ -127,23 +127,23 @@ class Index: async def test_range_serializes_properly() -> None: - class D(document.AsyncDocument): + class DocumentD(document.AsyncDocument): lr: Any = field.LongRange() - d = D(lr=Range(lt=42)) + d = DocumentD(lr=Range(lt=42)) assert 40 in d.lr assert 47 not in d.lr assert {"lr": {"lt": 42}} == d.to_dict() - d = D(lr={"lt": 42}) + d = DocumentD(lr={"lt": 42}) assert {"lr": {"lt": 42}} == d.to_dict() async def test_range_deserializes_properly() -> None: - class D(InnerDoc): + class DocumentD(InnerDoc): lr = field.LongRange() - d = D.from_opensearch({"lr": {"lt": 42}}, True) + d = DocumentD.from_opensearch({"lr": {"lt": 42}}, True) assert isinstance(d.lr, Range) assert 40 in d.lr assert 47 not in d.lr @@ -156,15 +156,15 @@ async def test_resolve_nested() -> None: async def test_conflicting_mapping_raises_error_in_index_to_dict() -> None: - class A(document.AsyncDocument): + class DocumentA(document.AsyncDocument): name = field.Text() - class B(document.AsyncDocument): + class DocumentB(document.AsyncDocument): name = field.Keyword() i = AsyncIndex("i") - i.document(A) - i.document(B) + i.document(DocumentA) + i.document(DocumentB) with raises(ValueError): i.to_dict() @@ -182,11 +182,11 @@ async def test_matches_uses_index() -> None: async def test_matches_with_no_name_always_matches() -> None: - class D(document.AsyncDocument): + class DocumentD(document.AsyncDocument): pass - assert D._matches({}) - assert D._matches({"_index": "whatever"}) + assert DocumentD._matches({}) + assert DocumentD._matches({"_index": "whatever"}) async def 
test_matches_accepts_wildcards() -> None: @@ -521,10 +521,10 @@ async def test_document_inheritance() -> None: async def test_child_class_can_override_parent() -> None: - class A(document.AsyncDocument): + class DocumentA(document.AsyncDocument): o = field.Object(dynamic=False, properties={"a": field.Text()}) - class B(A): + class DocumentB(DocumentA): o = field.Object(dynamic="strict", properties={"b": field.Text()}) assert { @@ -535,7 +535,7 @@ class B(A): "type": "object", } } - } == B._doc_type.mapping.to_dict() + } == DocumentB._doc_type.mapping.to_dict() async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: diff --git a/test_opensearchpy/test_async/test_server/__init__.py b/test_opensearchpy/test_async/test_server/__init__.py index 3541fdec..90cbf2f4 100644 --- a/test_opensearchpy/test_async/test_server/__init__.py +++ b/test_opensearchpy/test_async/test_server/__init__.py @@ -35,13 +35,13 @@ class AsyncOpenSearchTestCase(IsolatedAsyncioTestCase): # type: ignore - async def asyncSetUp(self) -> None: + async def asyncSetUp(self) -> None: # pylint: disable=invalid-name self.client = await get_test_client( verify_certs=False, http_auth=("admin", "admin") ) await add_connection("default", self.client) - async def asyncTearDown(self) -> None: + async def asyncTearDown(self) -> None: # pylint: disable=invalid-name wipe_cluster(self.client) if self.client: await self.client.close() diff --git a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py index 6751ed29..e5638a67 100644 --- a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py @@ -41,13 +41,13 @@ class TestSecurityPlugin(IsolatedAsyncioTestCase): # type: ignore USER_NAME = "test-user" USER_CONTENT = {"password": "opensearchpy@123", "opendistro_security_roles": []} - async def 
asyncSetUp(self) -> None: + async def asyncSetUp(self) -> None: # pylint: disable=invalid-name self.client = await get_test_client( verify_certs=False, http_auth=("admin", "admin") ) await add_connection("default", self.client) - async def asyncTearDown(self) -> None: + async def asyncTearDown(self) -> None: # pylint: disable=invalid-name if self.client: await self.client.close() diff --git a/test_opensearchpy/test_async/test_transport.py b/test_opensearchpy/test_async/test_transport.py index b494f83f..179a573c 100644 --- a/test_opensearchpy/test_async/test_transport.py +++ b/test_opensearchpy/test_async/test_transport.py @@ -95,7 +95,7 @@ async def close(self) -> None: } }""" -CLUSTER_NODES_7x_PUBLISH_HOST = """{ +CLUSTER_NODES_7X_PUBLISH_HOST = """{ "_nodes" : { "total" : 1, "successful" : 1, @@ -270,7 +270,7 @@ async def test_add_connection(self) -> None: assert 2 == len(t.connection_pool.connections) assert "http://google.com:1234" == t.connection_pool.connections[1].host - async def test_request_will_fail_after_X_retries(self) -> None: + async def test_request_will_fail_after_x_retries(self) -> None: t: Any = AsyncTransport( [{"exception": ConnectionError(None, "abandon ship", Exception())}], connection_class=DummyConnection, @@ -453,7 +453,7 @@ async def test_sniff_7x_publish_host(self) -> None: # Test the response shaped when a 7.x node has publish_host set # and the returend data is shaped in the fqdn/ip:port format. 
t: Any = AsyncTransport( - [{"data": CLUSTER_NODES_7x_PUBLISH_HOST}], + [{"data": CLUSTER_NODES_7X_PUBLISH_HOST}], connection_class=DummyConnection, sniff_timeout=42, ) diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py index 62adf39f..f175990d 100644 --- a/test_opensearchpy/test_connection/test_requests_http_connection.py +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -43,7 +43,7 @@ RequestError, TransportError, ) -from test_opensearchpy.TestHttpServer import TestHTTPServer +from test_opensearchpy.test_http_server import TestHTTPServer from ..test_cases import TestCase diff --git a/test_opensearchpy/test_helpers/test_aggs.py b/test_opensearchpy/test_helpers/test_aggs.py index 8a23c218..97ae368a 100644 --- a/test_opensearchpy/test_helpers/test_aggs.py +++ b/test_opensearchpy/test_helpers/test_aggs.py @@ -59,14 +59,14 @@ def test_meta_from_dict() -> None: assert aggs.A(a.to_dict()) == a -def test_A_creates_proper_agg() -> None: +def test_aggs_creates_proper_agg() -> None: a = aggs.A("terms", field="tags") assert isinstance(a, aggs.Terms) assert a._params == {"field": "tags"} -def test_A_handles_nested_aggs_properly() -> None: +def test_aggs_handles_nested_aggs_properly() -> None: max_score = aggs.Max(field="score") a = aggs.A("terms", field="tags", aggs={"max_score": max_score}) @@ -74,12 +74,12 @@ def test_A_handles_nested_aggs_properly() -> None: assert a._params == {"field": "tags", "aggs": {"max_score": max_score}} -def test_A_passes_aggs_through() -> None: +def test_aggs_passes_aggs_through() -> None: a = aggs.A("terms", field="tags") assert aggs.A(a) is a -def test_A_from_dict() -> None: +def test_aggs_from_dict() -> None: d = { "terms": {"field": "tags"}, "aggs": {"per_author": {"terms": {"field": "author.raw"}}}, @@ -95,7 +95,7 @@ def test_A_from_dict() -> None: assert a.aggs.per_author == aggs.A("terms", field="author.raw") -def 
test_A_fails_with_incorrect_dict() -> None: +def test_aggs_fails_with_incorrect_dict() -> None: correct_d = { "terms": {"field": "tags"}, "aggs": {"per_author": {"terms": {"field": "author.raw"}}}, @@ -115,7 +115,7 @@ def test_A_fails_with_incorrect_dict() -> None: aggs.A(d) -def test_A_fails_with_agg_and_params() -> None: +def test_aggs_fails_with_agg_and_params() -> None: a = aggs.A("terms", field="tags") with raises(Exception): diff --git a/test_opensearchpy/test_helpers/test_document.py b/test_opensearchpy/test_helpers/test_document.py index 1a156ad8..bc6707a1 100644 --- a/test_opensearchpy/test_helpers/test_document.py +++ b/test_opensearchpy/test_helpers/test_document.py @@ -137,23 +137,23 @@ class Index: def test_range_serializes_properly() -> None: - class D(document.Document): + class DocumentD(document.Document): lr = field.LongRange() - d: Any = D(lr=Range(lt=42)) + d: Any = DocumentD(lr=Range(lt=42)) assert 40 in d.lr assert 47 not in d.lr assert {"lr": {"lt": 42}} == d.to_dict() - d = D(lr={"lt": 42}) + d = DocumentD(lr={"lt": 42}) assert {"lr": {"lt": 42}} == d.to_dict() def test_range_deserializes_properly() -> None: - class D(document.InnerDoc): + class DocumentD(document.InnerDoc): lr = field.LongRange() - d: Any = D.from_opensearch({"lr": {"lt": 42}}, True) + d: Any = DocumentD.from_opensearch({"lr": {"lt": 42}}, True) assert isinstance(d.lr, Range) assert 40 in d.lr assert 47 not in d.lr @@ -166,15 +166,15 @@ def test_resolve_nested() -> None: def test_conflicting_mapping_raises_error_in_index_to_dict() -> None: - class A(document.Document): + class DocumentA(document.Document): name = field.Text() - class B(document.Document): + class DocumentB(document.Document): name = field.Keyword() i: Any = Index("i") - i.document(A) - i.document(B) + i.document(DocumentA) + i.document(DocumentB) with raises(ValueError): i.to_dict() @@ -192,11 +192,11 @@ def test_matches_uses_index() -> None: def test_matches_with_no_name_always_matches() -> None: - class 
D(document.Document): + class DocumentD(document.Document): pass - assert D._matches({}) - assert D._matches({"_index": "whatever"}) + assert DocumentD._matches({}) + assert DocumentD._matches({"_index": "whatever"}) def test_matches_accepts_wildcards() -> None: @@ -531,10 +531,10 @@ def test_document_inheritance() -> None: def test_child_class_can_override_parent() -> None: - class A(document.Document): + class DocumentA(document.Document): o = field.Object(dynamic=False, properties={"a": field.Text()}) - class B(A): + class DocumentB(DocumentA): o = field.Object(dynamic="strict", properties={"b": field.Text()}) assert { @@ -545,7 +545,7 @@ class B(A): "type": "object", } } - } == B._doc_type.mapping.to_dict() + } == DocumentB._doc_type.mapping.to_dict() def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: diff --git a/test_opensearchpy/test_helpers/test_query.py b/test_opensearchpy/test_helpers/test_query.py index 27790748..dbda0b91 100644 --- a/test_opensearchpy/test_helpers/test_query.py +++ b/test_opensearchpy/test_helpers/test_query.py @@ -32,7 +32,7 @@ from opensearchpy.helpers import function, query -def test_empty_Q_is_match_all() -> None: +def test_empty_query_is_match_all() -> None: q = query.Q() assert isinstance(q, query.MatchAll) @@ -389,57 +389,57 @@ class MyQuery(query.Query): assert query.Query._classes["my_query"] is MyQuery -def test_Q_passes_query_through() -> None: +def test_query_passes_query_through() -> None: q = query.Match(f="value1") assert query.Q(q) is q -def test_Q_constructs_query_by_name() -> None: +def test_query_constructs_query_by_name() -> None: q = query.Q("match", f="value") assert isinstance(q, query.Match) assert {"f": "value"} == q._params -def test_Q_translates_double_underscore_to_dots_in_param_names() -> None: +def test_query_translates_double_underscore_to_dots_in_param_names() -> None: q = query.Q("match", comment__author="honza") assert {"comment.author": "honza"} == q._params -def 
test_Q_doesn_translate_double_underscore_to_dots_in_param_names() -> None: +def test_query_doesn_translate_double_underscore_to_dots_in_param_names() -> None: q = query.Q("match", comment__author="honza", _expand__to_dot=False) assert {"comment__author": "honza"} == q._params -def test_Q_constructs_simple_query_from_dict() -> None: +def test_query_constructs_simple_query_from_dict() -> None: q = query.Q({"match": {"f": "value"}}) assert isinstance(q, query.Match) assert {"f": "value"} == q._params -def test_Q_constructs_compound_query_from_dict() -> None: +def test_query_constructs_compound_query_from_dict() -> None: q = query.Q({"bool": {"must": [{"match": {"f": "value"}}]}}) assert q == query.Bool(must=[query.Match(f="value")]) -def test_Q_raises_error_when_passed_in_dict_and_params() -> None: +def test_query_raises_error_when_passed_in_dict_and_params() -> None: with raises(Exception): query.Q({"match": {"f": "value"}}, f="value") -def test_Q_raises_error_when_passed_in_query_and_params() -> None: +def test_query_raises_error_when_passed_in_query_and_params() -> None: q = query.Match(f="value1") with raises(Exception): query.Q(q, f="value") -def test_Q_raises_error_on_unknown_query() -> None: +def test_query_raises_error_on_unknown_query() -> None: with raises(Exception): query.Q("not a query", f="value") diff --git a/test_opensearchpy/test_helpers/test_utils.py b/test_opensearchpy/test_helpers/test_utils.py index b6949833..d6139826 100644 --- a/test_opensearchpy/test_helpers/test_utils.py +++ b/test_opensearchpy/test_helpers/test_utils.py @@ -91,7 +91,7 @@ def test_attrlist_items_get_wrapped_during_iteration() -> None: assert isinstance(ls[3], utils.AttrDict) -def test_serializer_deals_with_Attr_versions() -> None: +def test_serializer_deals_with_attr_versions() -> None: d = utils.AttrDict({"key": utils.AttrList([1, 2, 3])}) assert serializer.serializer.dumps(d) == serializer.serializer.dumps( diff --git a/test_opensearchpy/TestHttpServer.py 
b/test_opensearchpy/test_http_server.py similarity index 82% rename from test_opensearchpy/TestHttpServer.py rename to test_opensearchpy/test_http_server.py index 3d8b31fb..844696ef 100644 --- a/test_opensearchpy/TestHttpServer.py +++ b/test_opensearchpy/test_http_server.py @@ -15,7 +15,9 @@ class TestHTTPRequestHandler(BaseHTTPRequestHandler): - def do_GET(self) -> None: + __test__ = False + + def do_GET(self) -> None: # pylint: disable=invalid-name headers = self.headers if self.path == "/redirect": @@ -28,14 +30,14 @@ def do_GET(self) -> None: self.end_headers() - Headers = {} + capitalized_headers = {} for header, value in headers.items(): capitalized_header = "-".join([word.title() for word in header.split("-")]) - Headers.update({capitalized_header: value}) - if "Connection" in Headers: - Headers.pop("Connection") + capitalized_headers.update({capitalized_header: value}) + if "Connection" in capitalized_headers: + capitalized_headers.pop("Connection") - data = {"method": "GET", "headers": Headers} + data = {"method": "GET", "headers": capitalized_headers} self.wfile.write(json.dumps(data).encode("utf-8")) diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py index 3249f41b..f5c6d8c7 100644 --- a/test_opensearchpy/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_server/test_rest_api_spec.py @@ -469,68 +469,75 @@ def sync_runner(sync_client: Any) -> Any: YAML_TEST_SPECS = [] -# Try loading the REST API test specs from the Elastic Artifacts API -try: - # Construct the HTTP and OpenSearch client - http = urllib3.PoolManager(retries=10) - client = get_client() - - package_url = "https://github.com/opensearch-project/OpenSearch/archive/main.zip" - - # Download the zip and start reading YAML from the files in memory - package_zip = zipfile.ZipFile(io.BytesIO(http.request("GET", package_url).data)) - for yaml_file in package_zip.namelist(): - if not re.match( - 
r"^OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/.*\.ya?ml$", - yaml_file, - ): - continue - yaml_tests = list(yaml.safe_load_all(package_zip.read(yaml_file))) - - # Each file may have a "test" named 'setup' or 'teardown', - # these sets of steps should be run at the beginning and end - # of every other test within the file so we do one pass to capture those. - setup_steps = teardown_steps = None - test_numbers_and_steps = [] - test_number = 0 - - for yaml_test in yaml_tests: - test_name, test_step = yaml_test.popitem() - if test_name == "setup": - setup_steps = test_step - elif test_name == "teardown": - teardown_steps = test_step - else: - test_numbers_and_steps.append((test_number, test_step)) - test_number += 1 - - # Now we combine setup, teardown, and test_steps into - # a set of pytest.param() instances - for test_number, test_step in test_numbers_and_steps: - # Build the id from the name of the YAML file and - # the number within that file. Most important step - # is to remove most of the file path prefixes and - # the .yml suffix. - pytest_test_name = yaml_file.rpartition(".")[0].replace(".", "/") - for prefix in ("rest-api-spec/", "test/", "oss/"): - if pytest_test_name.startswith(prefix): - pytest_test_name = pytest_test_name[len(prefix) :] - pytest_param_id = "%s[%d]" % (pytest_test_name, test_number) - - pytest_param = { - "setup": setup_steps, - "run": test_step, - "teardown": teardown_steps, - } - # Skip either 'test_name' or 'test_name[x]' - if pytest_test_name in SKIP_TESTS or pytest_param_id in SKIP_TESTS: - pytest_param["skip"] = True - - YAML_TEST_SPECS.append(pytest.param(pytest_param, id=pytest_param_id)) - -except Exception as e: - warnings.warn("Could not load REST API tests: %s" % (str(e),)) +client = get_client() + + +def load_rest_api_tests() -> None: + # Try loading the REST API test specs from OpenSearch core. 
+ try: + # Construct the HTTP and OpenSearch client + http = urllib3.PoolManager(retries=10) + package_url = ( + "https://github.com/opensearch-project/OpenSearch/archive/main.zip" + ) + + # Download the zip and start reading YAML from the files in memory + package_zip = zipfile.ZipFile(io.BytesIO(http.request("GET", package_url).data)) + for yaml_file in package_zip.namelist(): + if not re.match( + r"^OpenSearch-main/rest-api-spec/src/main/resources/rest-api-spec/test/.*\.ya?ml$", + yaml_file, + ): + continue + yaml_tests = list(yaml.safe_load_all(package_zip.read(yaml_file))) + + # Each file may have a "test" named 'setup' or 'teardown', + # these sets of steps should be run at the beginning and end + # of every other test within the file so we do one pass to capture those. + setup_steps = teardown_steps = None + test_numbers_and_steps = [] + test_number = 0 + + for yaml_test in yaml_tests: + test_name, test_step = yaml_test.popitem() + if test_name == "setup": + setup_steps = test_step + elif test_name == "teardown": + teardown_steps = test_step + else: + test_numbers_and_steps.append((test_number, test_step)) + test_number += 1 + + # Now we combine setup, teardown, and test_steps into + # a set of pytest.param() instances + for test_number, test_step in test_numbers_and_steps: + # Build the id from the name of the YAML file and + # the number within that file. Most important step + # is to remove most of the file path prefixes and + # the .yml suffix. 
+ pytest_test_name = yaml_file.rpartition(".")[0].replace(".", "/") + for prefix in ("rest-api-spec/", "test/", "oss/"): + if pytest_test_name.startswith(prefix): + pytest_test_name = pytest_test_name[len(prefix) :] + pytest_param_id = "%s[%d]" % (pytest_test_name, test_number) + + pytest_param = { + "setup": setup_steps, + "run": test_step, + "teardown": teardown_steps, + } + # Skip either 'test_name' or 'test_name[x]' + if pytest_test_name in SKIP_TESTS or pytest_param_id in SKIP_TESTS: + pytest_param["skip"] = True + + YAML_TEST_SPECS.append(pytest.param(pytest_param, id=pytest_param_id)) + + except Exception as e: + warnings.warn("Could not load REST API tests: %s" % (str(e),)) + + +load_rest_api_tests() if not RUN_ASYNC_REST_API_TESTS: diff --git a/test_opensearchpy/test_transport.py b/test_opensearchpy/test_transport.py index 4b37e3ac..e299e23f 100644 --- a/test_opensearchpy/test_transport.py +++ b/test_opensearchpy/test_transport.py @@ -82,7 +82,7 @@ def perform_request(self, *args: Any, **kwargs: Any) -> Any: } }""" -CLUSTER_NODES_7x_PUBLISH_HOST = """{ +CLUSTER_NODES_7X_PUBLISH_HOST = """{ "_nodes" : { "total" : 1, "successful" : 1, @@ -264,7 +264,7 @@ def test_add_connection(self) -> None: "http://google.com:1234", t.connection_pool.connections[1].host ) - def test_request_will_fail_after_X_retries(self) -> None: + def test_request_will_fail_after_x_retries(self) -> None: t: Any = Transport( [{"exception": ConnectionError(None, "abandon ship", Exception())}], connection_class=DummyConnection, @@ -408,7 +408,7 @@ def test_sniff_7x_publish_host(self) -> None: # Test the response shaped when a 7.x node has publish_host set # and the returend data is shaped in the fqdn/ip:port format. 
t: Any = Transport( - [{"data": CLUSTER_NODES_7x_PUBLISH_HOST}], + [{"data": CLUSTER_NODES_7X_PUBLISH_HOST}], connection_class=DummyConnection, sniff_timeout=42, ) diff --git a/utils/build-dists.py b/utils/build_dists.py similarity index 89% rename from utils/build-dists.py rename to utils/build_dists.py index bca9c154..137542b4 100644 --- a/utils/build-dists.py +++ b/utils/build_dists.py @@ -40,25 +40,25 @@ import tempfile from typing import Any -base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -tmp_dir = None +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +TMP_DIR = None @contextlib.contextmanager # type: ignore def set_tmp_dir() -> None: - global tmp_dir - tmp_dir = tempfile.mkdtemp() - yield tmp_dir - shutil.rmtree(tmp_dir) - tmp_dir = None + global TMP_DIR + TMP_DIR = tempfile.mkdtemp() + yield TMP_DIR + shutil.rmtree(TMP_DIR) + TMP_DIR = None def run(*argv: Any, expect_exit_code: int = 0) -> None: - global tmp_dir - if tmp_dir is None: - os.chdir(base_dir) + global TMP_DIR + if TMP_DIR is None: + os.chdir(BASE_DIR) else: - os.chdir(tmp_dir) + os.chdir(TMP_DIR) cmd = " ".join(shlex.quote(x) for x in argv) print("$ " + cmd) @@ -132,7 +132,7 @@ def test_dist(dist: Any) -> None: "-m", "mypy", "--strict", - os.path.join(base_dir, "test_opensearchpy/test_types/async_types.py"), + os.path.join(BASE_DIR, "test_opensearchpy/test_types/async_types.py"), ) # Ensure that the namespaces are correct for the dist @@ -153,7 +153,7 @@ def test_dist(dist: Any) -> None: "-m", "mypy", "--strict", - os.path.join(base_dir, "test_opensearchpy/test_types/sync_types.py"), + os.path.join(BASE_DIR, "test_opensearchpy/test_types/sync_types.py"), ) else: run( @@ -161,7 +161,7 @@ def test_dist(dist: Any) -> None: "-m", "mypy", "--strict", - os.path.join(base_dir, "test_opensearchpy/test_types/aliased_types.py"), + os.path.join(BASE_DIR, "test_opensearchpy/test_types/aliased_types.py"), ) # Uninstall the dist, see that we can't import things 
anymore @@ -187,7 +187,7 @@ def main() -> None: run("python", "setup.py", "sdist", "bdist_wheel") # Grab the major version to be used as a suffix. - version_path = os.path.join(base_dir, "opensearchpy/_version.py") + version_path = os.path.join(BASE_DIR, "opensearchpy/_version.py") with open(version_path) as f: data = f.read() m = re.search(r"^__versionstr__: str\s+=\s+[\"\']([^\"\']+)[\"\']", data, re.M) @@ -249,12 +249,12 @@ def main() -> None: # Rename the module to fit the suffix. shutil.move( - os.path.join(base_dir, "opensearchpy"), - os.path.join(base_dir, "opensearchpy%s" % suffix), + os.path.join(BASE_DIR, "opensearchpy"), + os.path.join(BASE_DIR, "opensearchpy%s" % suffix), ) # Ensure that the version within 'opensearchpy/_version.py' is correct. - version_path = os.path.join(base_dir, f"opensearchpy{suffix}/_version.py") + version_path = os.path.join(BASE_DIR, f"opensearchpy{suffix}/_version.py") with open(version_path) as f: version_data = f.read() version_data = re.sub( @@ -267,16 +267,16 @@ def main() -> None: f.write(version_data) # Rewrite setup.py with the new name. 
-    setup_py_path = os.path.join(base_dir, "setup.py")
+    setup_py_path = os.path.join(BASE_DIR, "setup.py")
     with open(setup_py_path) as f:
         setup_py = f.read()
     with open(setup_py_path, "w") as f:
         f.truncate()
-        assert 'package_name = "opensearch-py"' in setup_py
+        assert 'PACKAGE_NAME = "opensearch-py"' in setup_py
         f.write(
             setup_py.replace(
-                'package_name = "opensearch-py"',
-                'package_name = "opensearch-py%s"' % suffix,
+                'PACKAGE_NAME = "opensearch-py"',
+                'PACKAGE_NAME = "opensearch-py%s"' % suffix,
             )
         )

@@ -289,10 +289,10 @@ def main() -> None:
     run("rm", "-rf", "opensearchpy%s/" % suffix)

     # Test everything that got created
-    dists = os.listdir(os.path.join(base_dir, "dist"))
+    dists = os.listdir(os.path.join(BASE_DIR, "dist"))
     assert len(dists) == 4
     for dist in dists:
-        test_dist(os.path.join(base_dir, "dist", dist))
+        test_dist(os.path.join(BASE_DIR, "dist", dist))

     os.system("chmod a+w dist/*")
     # After this run 'python -m twine upload dist/*'
diff --git a/utils/generate-api.py b/utils/generate_api.py
similarity index 94%
rename from utils/generate-api.py
rename to utils/generate_api.py
index 792446dd..502cd2af 100644
--- a/utils/generate-api.py
+++ b/utils/generate_api.py
@@ -13,10 +13,10 @@
 # Modifications Copyright OpenSearch Contributors. See
 # GitHub history for details.
 #
-# Licensed to Elasticsearch B.V. under one or more contributor
+# Licensed to Elasticsearch B.V. under one or more contributor
 # license agreements. See the NOTICE file distributed with
 # this work for additional information regarding copyright
-# ownership. Elasticsearch B.V. licenses this file to you under
+# ownership. Elasticsearch B.V. licenses this file to you under
 # the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License.
 # You may obtain a copy of the License at
@@ -144,11 +144,11 @@ def dump(self) -> None:
         # This code snippet adds headers to each generated module indicating that the code is generated.
header_separator = "# -----------------------------------------------------" - License_header_end_1 = "# GitHub history for details." - License_header_end_2 = "# under the License." + license_header_end_1 = "# GitHub history for details." + license_header_end_2 = "# under the License." update_header = True - License_position = 0 + license_position = 0 # Identifying the insertion point for the "THIS CODE IS GENERATED" header. if os.path.exists(self.filepath): @@ -160,20 +160,20 @@ def dump(self) -> None: content.find(header_separator) + len(header_separator) + 2 ) header_position = content.rfind("\n", 0, header_end_position) + 1 - if License_header_end_1 in content: - if License_header_end_2 in content: + if license_header_end_1 in content: + if license_header_end_2 in content: position = ( - content.find(License_header_end_2) - + len(License_header_end_2) + content.find(license_header_end_2) + + len(license_header_end_2) + 2 ) else: position = ( - content.find(License_header_end_1) - + len(License_header_end_1) + content.find(license_header_end_1) + + len(license_header_end_1) + 2 ) - License_position = content.rfind("\n", 0, position) + 1 + license_position = content.rfind("\n", 0, position) + 1 current_script_folder = os.path.dirname(os.path.abspath(__file__)) generated_file_header_path = os.path.join( @@ -190,12 +190,12 @@ def dump(self) -> None: with open(self.filepath, "w") as f: if update_header is True: f.write( - self.header[:License_position] + self.header[:license_position] + "\n" + header_content + "\n\n" + "#replace_token#\n" - + self.header[License_position:] + + self.header[license_position:] ) else: f.write( @@ -470,21 +470,21 @@ def read_modules() -> Any: parts_new = {} for m in params: - A = dict(type=m["schema"]["type"], description=m["description"]) + a = dict(type=m["schema"]["type"], description=m["description"]) if "default" in m["schema"]: - A.update({"default": m["schema"]["default"]}) + a.update({"default": m["schema"]["default"]}) if 
"enum" in m["schema"]: - A.update({"type": "enum"}) - A.update({"options": m["schema"]["enum"]}) + a.update({"type": "enum"}) + a.update({"options": m["schema"]["enum"]}) if "deprecated" in m["schema"]: - A.update({"deprecated": m["schema"]["deprecated"]}) - A.update( + a.update({"deprecated": m["schema"]["deprecated"]}) + a.update( {"deprecation_message": m["schema"]["x-deprecation-message"]} ) - params_new.update({m["name"]: A}) + params_new.update({m["name"]: a}) # Removing the deprecated "type" if p["x-operation-group"] != "nodes.hot_threads" and "type" in params_new: @@ -502,17 +502,17 @@ def read_modules() -> Any: p.pop("parameters") for n in parts: - B = dict(type=n["schema"]["type"]) + b = dict(type=n["schema"]["type"]) if "description" in n: - B.update({"description": n["description"]}) + b.update({"description": n["description"]}) if "x-enum-options" in n["schema"]: - B.update({"options": n["schema"]["x-enum-options"]}) + b.update({"options": n["schema"]["x-enum-options"]}) deprecated_new = {} if "deprecated" in n: - B.update({"deprecated": n["deprecated"]}) + b.update({"deprecated": n["deprecated"]}) if "x-deprecation-version" in n: deprecated_new.update({"version": n["x-deprecation-version"]}) @@ -522,7 +522,7 @@ def read_modules() -> Any: {"description": n["x-deprecation-description"]} ) - parts_new.update({n["name"]: B}) + parts_new.update({n["name"]: b}) if bool(parts_new): p.update({"parts": parts_new}) diff --git a/utils/generated_file_headers.txt b/utils/generated_file_headers.txt index 135828ce..6bcf8bcb 100644 --- a/utils/generated_file_headers.txt +++ b/utils/generated_file_headers.txt @@ -2,7 +2,7 @@ # THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
# # To contribute, kindly make essential modifications through either the "opensearch-py client generator": -# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate_api.py # or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json # ----------------------------------------------------- diff --git a/utils/license-headers.py b/utils/license_headers.py similarity index 90% rename from utils/license-headers.py rename to utils/license_headers.py index e0f31b59..903f176d 100644 --- a/utils/license-headers.py +++ b/utils/license_headers.py @@ -18,9 +18,9 @@ import sys from typing import Iterator, List -lines_to_keep = ["# -*- coding: utf-8 -*-", "#!/usr/bin/env python"] +LINES_TO_KEEP = ["# -*- coding: utf-8 -*-", "#!/usr/bin/env python"] -license_header = """ +LICENSE_HEADER = """ # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -54,14 +54,14 @@ def does_file_need_fix(filepath: str) -> bool: with open(filepath, mode="r") as f: for line in f: line = line.strip() - if len(line) == 0 or line in lines_to_keep: + if len(line) == 0 or line in LINES_TO_KEEP: pass elif line[0] == "#": existing_header += line existing_header += "\n" else: break - return not existing_header.startswith(license_header) + return not existing_header.startswith(LICENSE_HEADER) def add_header_to_file(filepath: str) -> None: @@ -69,9 +69,9 @@ def add_header_to_file(filepath: str) -> None: lines = list(f) i = 0 for i, line in enumerate(lines): - if len(line) > 0 and line not in lines_to_keep: + if len(line) > 0 and line not in LINES_TO_KEEP: break - lines = lines[:i] + [license_header] + lines[i:] + lines = lines[:i] + [LICENSE_HEADER] + lines[i:] with open(filepath, mode="w") as f: f.truncate() f.write("".join(lines))